VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@ 100843

Last change on this file since 100843 was 100843, checked in by vboxsync, 16 months ago

VMM/IEM: More conversion from IEM_MC_MEM_MAP to IEM_MC_MEM_MAP_XXX. Correct the other 32-bit sized BT instruction to not clear high bits. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 514.1 KB
Line 
1/* $Id: IEMAllInstTwoByte0f.cpp.h 100843 2023-08-09 20:09:44Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name Two byte opcodes (first byte 0x0f).
33 *
34 * @{
35 */
36
37
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Requires the MMX cpuid feature; the LOCK prefix raises \#UD.  The @a pfnU64
 * worker is invoked via IEM_MC_CALL_MMX_AIMPL_2, i.e. it also gets the FPU
 * state (see iemOpCommonMmxOpt_FullFull_To_Full for the stateless variant).
 *
 * @param   pfnU64      The instruction emulation worker (dst, src operands).
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();           /* Put the FPU into MMX mode before touching MMX registers. */

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);  /* Mark the destination MMX register dirty (tag word update). */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address calc consumes the remaining opcode bytes (SIB/displacement),
           so it must precede the done-decoding check. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
95
96
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands (hence IEM_MC_CALL_VOID_AIMPL_2 below).
 * Requires the MMX cpuid feature; the LOCK prefix raises \#UD.
 *
 * @param   pfnU64      The stateless instruction emulation worker.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
157
158
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Accepts either the SSE cpuid feature or the AMD MMX extensions feature
 * (see the IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR check below);
 * the LOCK prefix raises \#UD.  The @a pfnU64 worker gets the FPU state
 * (IEM_MC_CALL_MMX_AIMPL_2).
 *
 * @param   pfnU64      The instruction emulation worker.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
217
218
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function
 * takes no FXSAVE state, just the operands (IEM_MC_CALL_VOID_AIMPL_2).
 * Accepts either the SSE cpuid feature or the AMD MMX extensions feature;
 * the LOCK prefix raises \#UD.
 *
 * @param   pfnU64      The stateless instruction emulation worker.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
280
281
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
 *
 * Requires the SSE2 cpuid feature (not MMX); the LOCK prefix raises \#UD.
 * The @a pfnU64 worker gets the FPU state (IEM_MC_CALL_MMX_AIMPL_2).
 *
 * @param   pfnU64      The instruction emulation worker.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
340
341
/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced (\#GP on misalignment,
 * see IEM_MC_FETCH_MEM_U128_ALIGN_SSE).  SSE cpuid checks.  No SIMD FP
 * exceptions.  The LOCK prefix raises \#UD.
 *
 * @param   pfnU128     The instruction emulation worker (gets the FPU state
 *                      via IEM_MC_CALL_SSE_AIMPL_2).
 * @sa      iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
395
396
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced (\#GP on misalignment).
 * Exceptions type 4.  SSE2 cpuid checks.  The LOCK prefix raises \#UD.
 *
 * @param   pfnU128     The instruction emulation worker (gets the FPU state
 *                      via IEM_MC_CALL_SSE_AIMPL_2).
 * @sa      iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
450
451
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced (\#GP on misalignment).
 * Exceptions type 4.  SSE2 cpuid checks.  The LOCK prefix raises \#UD.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands (IEM_MC_CALL_VOID_AIMPL_2).
 *
 * @param   pfnU128     The stateless instruction emulation worker.
 * @sa      iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
508
509
/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access (zero-extended to 64 bits below, see
 * IEM_MC_FETCH_MEM_U32_ZX_U64).  Requires the MMX cpuid feature; the LOCK
 * prefix raises \#UD.
 *
 * @param   pfnU64      The stateless instruction emulation worker.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* Only 32 bits are accessed in memory; the upper half is zero-extended. */
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
568
569
/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * is a 128-bit aligned 64-bit or 128-bit memory access for SSE (we read the
 * full 128 bits here, see the todo below).
 *
 * Exceptions type 4.  The LOCK prefix raises \#UD.
 *
 * @param   pfnU64      The stateless instruction emulation worker.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
628
629
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * is a 128-bit aligned 64-bit or 128-bit memory access for SSE (we read the
 * full 128 bits here, see the todo below).
 *
 * Exceptions type 4.  SSE2 cpuid checks.  The LOCK prefix raises \#UD.
 *
 * @param   pfnU128     The stateless instruction emulation worker.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
688
689
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.  Requires the MMX cpuid feature; the
 * LOCK prefix raises \#UD.
 *
 * @param   pfnU64      The stateless instruction emulation worker.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
750
751
/**
 * Common worker for SSE instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where the hardware may read the full 128 bits or
 * only the upper 64 bits (we read the full 128 bits, see the todo below).
 *
 * Exceptions type 4.  The LOCK prefix raises \#UD.
 *
 * @param   pfnU128     The stateless instruction emulation worker.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
810
811
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced (\#GP on misalignment).
 * Exceptions type 2.  SSE cpuid checks.  The LOCK prefix raises \#UD.
 *
 * The result is committed via IEM_MC_STORE_SSE_RESULT and any pending SIMD
 * floating-point exception is raised afterwards (see
 * IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT below).
 *
 * @param   pfnU128     The instruction emulation worker (result + two sources).
 * @sa      iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
874
875
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
 *
 * The 2nd operand is a scalar single-precision value: a 32-bit register slice
 * or a 32-bit memory read (no 128-bit alignment requirement for the memory
 * form, see IEM_MC_FETCH_MEM_R32).  Exceptions type 2 (well, type 3 for the
 * memory form).  SSE cpuid checks.  The LOCK prefix raises \#UD.
 *
 * @param   pfnU128_R32     The instruction emulation worker.
 * @sa      iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
938
939
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced (\#GP on misalignment).
 * Exceptions type 2.  SSE2 cpuid checks.  The LOCK prefix raises \#UD.
 *
 * The result is committed via IEM_MC_STORE_SSE_RESULT and any pending SIMD
 * floating-point exception is raised afterwards.
 *
 * @param   pfnU128     The instruction emulation worker (result + two sources).
 * @sa      iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Address calc first: it consumes the remaining opcode bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1002
1003
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxs       xmm1, xmm2/mem64
 *
 * The 64-bit memory operand is fetched without an alignment check (scalar
 * access via IEM_MC_FETCH_MEM_R64 below).  NOTE(review): the earlier comment
 * claimed 128-bit alignment enforcement / exception type 2, which does not
 * match the mem64 path here — scalar forms are presumably exception type 3;
 * confirm against the SDM.  SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        /* Only the low double of the source register is referenced. */
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Plain 64-bit fetch - no SSE alignment restriction for scalar loads. */
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1066
1067
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Destination is referenced writable and updated in place by the aimpl. */
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1126
1127
/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx      xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks (fSse3 is tested below).
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        /* Result is written back only after the aimpl call, then pending
           SIMD FP exceptions are raised. */
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Alignment-checked fetch: misaligned 128-bit operands fault. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1190
1191
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: honours the effective operand size. */
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1215
1216
/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();


    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: honours the effective operand size. */
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1241
1242
/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Selector comes from a general purpose register. */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Selector comes from memory; CPL check precedes the fetch. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}
1272
1273
/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Selector comes from a general purpose register. */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Selector comes from memory; CPL check precedes the fetch. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}
1303
1304
1305/** Opcode 0x0f 0x00 /3. */
1306FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
1307{
1308 IEMOP_HLP_MIN_286();
1309 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1310
1311 if (IEM_IS_MODRM_REG_MODE(bRm))
1312 {
1313 IEM_MC_BEGIN(2, 0);
1314 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1315 IEM_MC_ARG(uint16_t, u16Sel, 0);
1316 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1317 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1318 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
1319 IEM_MC_END();
1320 }
1321 else
1322 {
1323 IEM_MC_BEGIN(2, 1);
1324 IEM_MC_ARG(uint16_t, u16Sel, 0);
1325 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1328 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1329 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1330 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
1331 IEM_MC_END();
1332 }
1333}
1334
1335
/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    /* fWrite=false: check the descriptor for readability. */
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
1343
1344
/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    /* fWrite=true: check the descriptor for writability. */
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}
1352
1353
/**
 * Group 6 jump table, indexed by the ModR/M reg field (/0../7).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,        /* /0 */
    iemOp_Grp6_str,         /* /1 */
    iemOp_Grp6_lldt,        /* /2 */
    iemOp_Grp6_ltr,         /* /3 */
    iemOp_Grp6_verr,        /* /4 */
    iemOp_Grp6_verw,        /* /5 */
    iemOp_InvalidWithRM,    /* /6 */
    iemOp_InvalidWithRM     /* /7 */
};
1368
/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    /* Dispatch on the reg field; each worker handles reg vs. mem itself. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
1375
1376
/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    /* Memory destination only (the register encodings are routed elsewhere
       by iemOp_Grp7); stores the GDTR base+limit. */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
1392
1393
/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_vmcall);
}
1406
1407
/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    /* VM entry changes mode/flags and transfers control, so the current
       translation block must end here. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    /* Without nested VMX support this encoding raises #UD. */
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif
1427
1428
/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    /* VM entry changes mode/flags and transfers control, so the current
       translation block must end here. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    /* Without nested VMX support this encoding raises #UD. */
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif
1448
1449
/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    /* Without nested VMX support this encoding raises #UD. */
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif
1467
1468
/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    /* Memory destination only (same structure as sgdt above); stores the
       IDTR base+limit. */
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
1484
1485
/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    /* The effective segment is passed along since segment prefixes apply to
       the monitored address; everything else is done by the C impl. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
1493
1494
/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* May halt the vCPU, so the translation block must end here. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_mwait);
}
1502
1503
/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    /* The operand size decides how many base address bits are loaded, so it
       is forwarded to the C implementation. */
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}
1519
1520
/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    /* #UD unless the CPU profile advertises XSAVE/XRSTOR. */
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
1537
1538
/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    /* #UD unless the CPU profile advertises XSAVE/XRSTOR. */
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
1555
1556
/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    /* In 64-bit mode the operand size is forced to 64-bit. */
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}
1572
1573
/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    /* World switch into the nested guest: mode/flags change and control is
       transferred, so the translation block must end here. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif
1587
/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumtion
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmmcall);
}
1603
/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload); /* #UD without nested SVM support. */
#endif
1615
1616
/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave); /* #UD without nested SVM support. */
#endif
1628
1629
/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi); /* #UD without nested SVM support. */
#endif
1641
1642
/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi); /* #UD without nested SVM support. */
#endif
1654
1655
/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga); /* #UD without nested SVM support. */
#endif
1667
1668
/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit); /* #UD without nested SVM support. */
#endif
1680
1681
/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: honours the effective operand size. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1703
1704
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        /* NIL_RTGCPTR marks the register form for the C implementation
           (presumably for SVM decode-assist info - confirm there). */
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
}
1734
1735
/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    /* Memory operand only; the address is passed untouched - no access is
       performed here, the C impl does the TLB invalidation. */
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
}
1748
1749
/** Opcode 0x0f 0x01 0xf8. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT(); /* #UD outside 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_swapgs);
}
1758
1759
/** Opcode 0x0f 0x01 0xf9. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtscp);
}
1767
1768
/**
 * Group 7 jump table, memory variant, indexed by the ModR/M reg field (/0../7).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,        /* /0 */
    iemOp_Grp7_sidt,        /* /1 */
    iemOp_Grp7_lgdt,        /* /2 */
    iemOp_Grp7_lidt,        /* /3 */
    iemOp_Grp7_smsw,        /* /4 */
    iemOp_InvalidWithRM,    /* /5 */
    iemOp_Grp7_lmsw,        /* /6 */
    iemOp_Grp7_invlpg       /* /7 */
};
1783
1784
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Memory forms dispatch via the table; register forms encode individual
       instructions in the reg+rm fields and are decoded below. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: /* VMX: vmcall/vmlaunch/vmresume/vmxoff */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 1: /* monitor/mwait */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 2: /* xgetbv/xsetbv */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 3: /* AMD SVM instructions - all eight rm values are defined. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* smsw also has a register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 6: /* lmsw also has a register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* swapgs/rdtscp */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1854
/** Common worker for LAR and LSL (opcodes 0x0f 0x02 and 0x0f 0x03),
 *  Gv,Ew forms; fIsLar selects between them.
 *  NOTE(review): no break/return is visible after the IEM_MC_END() calls;
 *  assumed IEM_MC_END expands to one so the cases do not fall through -
 *  confirm against the IEM_MC macro definitions. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
            }

            /* 32-bit and 64-bit share the 64-bit destination reference; the
               C implementation handles the operand size difference. */
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1945
1946
1947
/** Opcode 0x0f 0x02 - lar Gv, Ew (load access rights). */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    /* Shares its worker with lsl; the boolean selects LAR semantics. */
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1954
1955
/** Opcode 0x0f 0x03 - lsl Gv, Ew (load segment limit). */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    /* Shares its worker with lar; the boolean selects LSL semantics. */
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1962
1963
/** Opcode 0x0f 0x05 - syscall.  Deferred entirely to the C implementation. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Far indirect branch that changes mode and RFLAGS; ends the current TB. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
                                iemCImpl_syscall);
}
1973
1974
/** Opcode 0x0f 0x06 - clts (clear CR0.TS).  May cause a VM-exit. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clts);
}
1982
1983
/** Opcode 0x0f 0x07 - sysret.  The effective operand size selects the
 *  return mode, so it is passed on to the C implementation. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Far indirect branch that changes mode and RFLAGS; ends the current TB. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
                                iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
}
1993
1994
/** Opcode 0x0f 0x08 - invd (invalidate caches).  486+ only. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invd);
}
2003
2004
/** Opcode 0x0f 0x09 - wbinvd (write back and invalidate caches).  486+ only. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wbinvd);
}
2013
2014
/** Opcode 0x0f 0x0b - ud2 (guaranteed invalid opcode, raises \#UD). */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2021
/** Opcode 0x0f 0x0d - AMD 3DNow! prefetch group (GrpP). */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    /* Only valid when the CPU profile has long mode or 3DNow! prefetch. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register form is invalid; only memory operands are accepted. */
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* The reg field selects the prefetch variant; most encodings alias /0. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the memory operand to consume the ModRM/displacement bytes,
       but do not touch memory - prefetch hints are ignored here. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2061
2062
/** Opcode 0x0f 0x0e - femms (AMD 3DNow! fast exit from MMX mode). */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");

    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Take the FPU out of MMX mode (same state transition as used for EMMS). */
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2077
2078
/** Opcode 0x0f 0x0f - the 3DNow! escape; the actual operation is selected
 *  by a trailing opcode byte handled by the dispatcher. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* Raise #UD if the CPU profile does not advertise 3DNow!. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    /* Built without 3DNow! support: report an unimplemented instruction. */
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
2097
2098
2099/**
2100 * @opcode 0x10
2101 * @oppfx none
2102 * @opcpuid sse
2103 * @opgroup og_sse_simdfp_datamove
2104 * @opxcpttype 4UA
2105 * @optest op1=1 op2=2 -> op1=2
2106 * @optest op1=0 op2=-22 -> op1=-22
2107 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        /* Straight 128-bit register copy. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        /* Unaligned access is allowed for movups, so the plain (non-aligning)
           128-bit fetch is used here. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

}
2148
2149
2150/**
2151 * @opcode 0x10
2152 * @oppfx 0x66
2153 * @opcpuid sse2
2154 * @opgroup og_sse2_pcksclr_datamove
2155 * @opxcpttype 4UA
2156 * @optest op1=1 op2=2 -> op1=2
2157 * @optest op1=0 op2=-42 -> op1=-42
2158 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        /* Straight 128-bit register copy; requires SSE2 (0x66 prefix form). */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        /* Unaligned access is allowed for movupd, hence the plain 128-bit fetch. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2198
2199
2200/**
2201 * @opcode 0x10
2202 * @oppfx 0xf3
2203 * @opcpuid sse
2204 * @opgroup og_sse_simdfp_datamove
2205 * @opxcpttype 5
2206 * @optest op1=1 op2=2 -> op1=2
2207 * @optest op1=0 op2=-22 -> op1=-22
2208 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32.
         */
        /* Register form: only the low dword is copied, bits 127:32 of the
           destination are left unchanged. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        /* Memory form: the 32-bit value is zero-extended to the full 128-bit
           register (note the ZX store). */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2251
2252
2253/**
2254 * @opcode 0x10
2255 * @oppfx 0xf2
2256 * @opcpuid sse2
2257 * @opgroup og_sse2_pcksclr_datamove
2258 * @opxcpttype 5
2259 * @optest op1=1 op2=2 -> op1=2
2260 * @optest op1=0 op2=-42 -> op1=-42
2261 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        /* Register form: only the low qword is copied, bits 127:64 of the
           destination are left unchanged. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem64].
         */
        /* Memory form: the 64-bit value is zero-extended to the full 128-bit
           register (note the ZX store). */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2304
2305
2306/**
2307 * @opcode 0x11
2308 * @oppfx none
2309 * @opcpuid sse
2310 * @opgroup og_sse_simdfp_datamove
2311 * @opxcpttype 4UA
2312 * @optest op1=1 op2=2 -> op1=2
2313 * @optest op1=0 op2=-42 -> op1=-42
2314 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        /* Store direction (MR form): rm is the destination, reg the source. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM128.
         */
        /* Unaligned store is allowed for movups; SSE state only needs to be
           actualized for reading since no register is modified. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2354
2355
2356/**
2357 * @opcode 0x11
2358 * @oppfx 0x66
2359 * @opcpuid sse2
2360 * @opgroup og_sse2_pcksclr_datamove
2361 * @opxcpttype 4UA
2362 * @optest op1=1 op2=2 -> op1=2
2363 * @optest op1=0 op2=-42 -> op1=-42
2364 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        /* Store direction (MR form): rm is the destination, reg the source. */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM128.
         */
        /* Unaligned store is allowed for movupd; SSE state only needs to be
           actualized for reading since no register is modified. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2404
2405
2406/**
2407 * @opcode 0x11
2408 * @oppfx 0xf3
2409 * @opcpuid sse
2410 * @opgroup og_sse_simdfp_datamove
2411 * @opxcpttype 5
2412 * @optest op1=1 op2=2 -> op1=2
2413 * @optest op1=0 op2=-22 -> op1=-22
2414 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32.
         */
        /* Register form: copies only the low dword into rm; bits 127:32 of
           the destination register are left unchanged. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem32], XMM32.
         */
        /* Memory form: stores the low dword of the source register. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2457
2458
2459/**
2460 * @opcode 0x11
2461 * @oppfx 0xf2
2462 * @opcpuid sse2
2463 * @opgroup og_sse2_pcksclr_datamove
2464 * @opxcpttype 5
2465 * @optest op1=1 op2=2 -> op1=2
2466 * @optest op1=0 op2=-42 -> op1=-42
2467 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        /* Register form: copies only the low qword into rm; bits 127:64 of
           the destination register are left unchanged. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], XMM64.
         */
        /* Memory form: stores the low qword of the source register. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2510
2511
/* Opcode 0x0f 0x12: movhlps (register form) / movlps (memory form). */
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* movhlps: copies the high qword of the source into the low qword of
           the destination; the destination's high qword is left unchanged. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* movlps: loads a qword from memory into the low qword of the
           destination; the destination's high qword is left unchanged. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2573
2574
2575/**
2576 * @opcode 0x12
2577 * @opcodesub !11 mr/reg
2578 * @oppfx 0x66
2579 * @opcpuid sse2
2580 * @opgroup og_sse2_pcksclr_datamove
2581 * @opxcpttype 5
2582 * @optest op1=1 op2=2 -> op1=2
2583 * @optest op1=0 op2=-42 -> op1=-42
2584 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* Loads a qword from memory into the low qword of the destination;
           the destination's high qword is left unchanged. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
2621
2622
2623/**
2624 * @opcode 0x12
2625 * @oppfx 0xf3
2626 * @opcpuid sse3
2627 * @opgroup og_sse3_pcksclr_datamove
2628 * @opxcpttype 4
2629 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2630 * op1=0x00000002000000020000000100000001
2631 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        /* Duplicates the even-indexed source dwords: dst = {s0, s0, s2, s2}. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        /* Memory source must be 16-byte aligned (aligning fetch), then the
           even dwords are duplicated as in the register form. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2681
2682
2683/**
2684 * @opcode 0x12
2685 * @oppfx 0xf2
2686 * @opcpuid sse3
2687 * @opgroup og_sse3_pcksclr_datamove
2688 * @opxcpttype 5
2689 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2690 * op1=0x22222222111111112222222211111111
2691 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM64.
         */
        /* Duplicates the source's low qword into both halves of the destination. */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_ARG(uint64_t, uSrc, 0);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem64].
         */
        /* Loads one qword and duplicates it into both halves of the destination. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(uint64_t, uSrc, 0);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2737
2738
2739/**
2740 * @opcode 0x13
2741 * @opcodesub !11 mr/reg
2742 * @oppfx none
2743 * @opcpuid sse
2744 * @opgroup og_sse_simdfp_datamove
2745 * @opxcpttype 5
2746 * @optest op1=1 op2=2 -> op1=2
2747 * @optest op1=0 op2=-42 -> op1=-42
2748 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* Stores the low qword of the source register to memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
2785
2786
2787/**
2788 * @opcode 0x13
2789 * @opcodesub !11 mr/reg
2790 * @oppfx 0x66
2791 * @opcpuid sse2
2792 * @opgroup og_sse2_pcksclr_datamove
2793 * @opxcpttype 5
2794 * @optest op1=1 op2=2 -> op1=2
2795 * @optest op1=0 op2=-42 -> op1=-42
2796 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        /* Stores the low qword of the source register to memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
2832
2833
2834/**
2835 * @opmnemonic udf30f13
2836 * @opcode 0x13
2837 * @oppfx 0xf3
2838 * @opunused intel-modrm
2839 * @opcpuid sse
2840 * @optest ->
2841 * @opdone
2842 */
2843
2844/**
2845 * @opmnemonic udf20f13
2846 * @opcode 0x13
2847 * @oppfx 0xf2
2848 * @opunused intel-modrm
2849 * @opcpuid sse
2850 * @optest ->
2851 * @opdone
2852 */
2853
/** Opcode 0x0f 0x14 - unpcklps Vx, Wx (interleave low single-precision). */
FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Delegates to the shared low-qword-interleave SSE worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
}
2860
2861
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx (interleave low double-precision). */
FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Delegates to the shared low-qword-interleave SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
}
2868
2869
2870/**
2871 * @opdone
2872 * @opmnemonic udf30f14
2873 * @opcode 0x14
2874 * @oppfx 0xf3
2875 * @opunused intel-modrm
2876 * @opcpuid sse
2877 * @optest ->
2878 * @opdone
2879 */
2880
2881/**
2882 * @opmnemonic udf20f14
2883 * @opcode 0x14
2884 * @oppfx 0xf2
2885 * @opunused intel-modrm
2886 * @opcpuid sse
2887 * @optest ->
2888 * @opdone
2889 */
2890
/** Opcode 0x0f 0x15 - unpckhps Vx, Wx (interleave high single-precision). */
FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Delegates to the shared high-qword-interleave SSE worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
}
2897
2898
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx (interleave high double-precision). */
FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Delegates to the shared high-qword-interleave SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
}
2905
2906
2907/* Opcode 0xf3 0x0f 0x15 - invalid */
2908/* Opcode 0xf2 0x0f 0x15 - invalid */
2909
2910/**
2911 * @opdone
2912 * @opmnemonic udf30f15
2913 * @opcode 0x15
2914 * @oppfx 0xf3
2915 * @opunused intel-modrm
2916 * @opcpuid sse
2917 * @optest ->
2918 * @opdone
2919 */
2920
2921/**
2922 * @opmnemonic udf20f15
2923 * @opcode 0x15
2924 * @oppfx 0xf2
2925 * @opunused intel-modrm
2926 * @opcpuid sse
2927 * @optest ->
2928 * @opdone
2929 */
2930
/* Opcode 0x0f 0x16: movlhps (register form) / movhps (memory form). */
FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* movlhps: copies the low qword of the source into the high qword of
           the destination; the destination's low qword is left unchanged. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* movhps: loads a qword from memory into the high qword of the
           destination; the destination's low qword is left unchanged. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2992
2993
2994/**
2995 * @opcode 0x16
2996 * @opcodesub !11 mr/reg
2997 * @oppfx 0x66
2998 * @opcpuid sse2
2999 * @opgroup og_sse2_pcksclr_datamove
3000 * @opxcpttype 5
3001 * @optest op1=1 op2=2 -> op1=2
3002 * @optest op1=0 op2=-42 -> op1=-42
3003 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        /* Loads a qword from memory into the high qword of the destination;
           the destination's low qword is left unchanged. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3039
3040
3041/**
3042 * @opcode 0x16
3043 * @oppfx 0xf3
3044 * @opcpuid sse3
3045 * @opgroup og_sse3_pcksclr_datamove
3046 * @opxcpttype 4
3047 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3048 * op1=0x00000002000000020000000100000001
3049 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        /* Duplicates the odd-indexed source dwords: dst = {s1, s1, s3, s3}. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        /* Memory source must be 16-byte aligned (aligning fetch), then the
           odd dwords are duplicated as in the register form. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3099
3100/**
3101 * @opdone
3102 * @opmnemonic udf30f16
3103 * @opcode 0x16
3104 * @oppfx 0xf2
3105 * @opunused intel-modrm
3106 * @opcpuid sse
3107 * @optest ->
3108 * @opdone
3109 */
3110
3111
3112/**
3113 * @opcode 0x17
3114 * @opcodesub !11 mr/reg
3115 * @oppfx none
3116 * @opcpuid sse
3117 * @opgroup og_sse_simdfp_datamove
3118 * @opxcpttype 5
3119 * @optest op1=1 op2=2 -> op1=2
3120 * @optest op1=0 op2=-42 -> op1=-42
3121 */
3122FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3123{
3124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3125 if (IEM_IS_MODRM_MEM_MODE(bRm))
3126 {
3127 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3128
3129 IEM_MC_BEGIN(0, 2);
3130 IEM_MC_LOCAL(uint64_t, uSrc);
3131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3132
3133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3135 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3136 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3137
3138 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3139 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3140
3141 IEM_MC_ADVANCE_RIP_AND_FINISH();
3142 IEM_MC_END();
3143 }
3144
3145 /**
3146 * @opdone
3147 * @opmnemonic ud0f17m3
3148 * @opcode 0x17
3149 * @opcodesub 11 mr/reg
3150 * @oppfx none
3151 * @opunused immediate
3152 * @opcpuid sse
3153 * @optest ->
3154 */
3155 else
3156 IEMOP_RAISE_INVALID_OPCODE_RET();
3157}
3158
3159
3160/**
3161 * @opcode 0x17
3162 * @opcodesub !11 mr/reg
3163 * @oppfx 0x66
3164 * @opcpuid sse2
3165 * @opgroup og_sse2_pcksclr_datamove
3166 * @opxcpttype 5
3167 * @optest op1=1 op2=2 -> op1=2
3168 * @optest op1=0 op2=-42 -> op1=-42
3169 */
3170FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3171{
3172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3173 if (IEM_IS_MODRM_MEM_MODE(bRm))
3174 {
3175 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3176
3177 IEM_MC_BEGIN(0, 2);
3178 IEM_MC_LOCAL(uint64_t, uSrc);
3179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3180
3181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3183 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3184 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3185
3186 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3187 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3188
3189 IEM_MC_ADVANCE_RIP_AND_FINISH();
3190 IEM_MC_END();
3191 }
3192
3193 /**
3194 * @opdone
3195 * @opmnemonic ud660f17m3
3196 * @opcode 0x17
3197 * @opcodesub 11 mr/reg
3198 * @oppfx 0x66
3199 * @opunused immediate
3200 * @opcpuid sse
3201 * @optest ->
3202 */
3203 else
3204 IEMOP_RAISE_INVALID_OPCODE_RET();
3205}
3206
3207
3208/**
3209 * @opdone
3210 * @opmnemonic udf30f17
3211 * @opcode 0x17
3212 * @oppfx 0xf3
3213 * @opunused intel-modrm
3214 * @opcpuid sse
3215 * @optest ->
3216 * @opdone
3217 */
3218
3219/**
3220 * @opmnemonic udf20f17
3221 * @opcode 0x17
3222 * @oppfx 0xf2
3223 * @opunused intel-modrm
3224 * @opcpuid sse
3225 * @optest ->
3226 * @opdone
3227 */
3228
3229
/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    /* PREFETCHh m8 (group 16): hint-only instruction, decoded for its mnemonic
       and then treated as a NOP; only the effective address is calculated.
       The register-form encoding (mod=11) is invalid. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3261
3262
/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    /* Multi-byte / reserved NOP: consumes a full ModR/M operand (including any
       effective-address calculation for the memory form) but has no effect. */
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        /* The address is decoded (may fault on bad decoding) but never accessed. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3287
3288
/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* MOV Rd, CRx: read a control register into a general register.
       Deferred to iemCImpl_mov_Rd_Cd; may cause a VM-exit. */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is fixed: 64-bit in long mode, 32-bit otherwise. */
    if (IEM_IS_64BIT_CODE(pVCpu))
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    IEMOP_HLP_DONE_DECODING();

    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
}
3320
3321
/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    /* MOV Rd, DRx: read a debug register into a general register.
       Deferred to iemCImpl_mov_Rd_Dd; may cause a VM-exit. */
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would address DR8..DR15, which don't exist -> #UD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
}
3333
3334
/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* MOV CRx, Rd: write a general register into a control register.
       Deferred to iemCImpl_mov_Cd_Rd; may change the execution mode. */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is fixed: 64-bit in long mode, 32-bit otherwise. */
    if (IEM_IS_64BIT_CODE(pVCpu))
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    IEMOP_HLP_DONE_DECODING();

    /* Writes to CR2/CR8 cannot change paging/protection mode, so only
       CR0/CR3/CR4 stores need the IEM_CIMPL_F_MODE flag. */
    if (iCrReg & (2 | 8))
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                    iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
    else
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT,
                                    iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
}
3371
3372
/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /* MOV DRx, Rd: write a general register into a debug register.
       Deferred to iemCImpl_mov_Dd_Rd; may cause a VM-exit. */
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would address DR8..DR15, which don't exist -> #UD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
}
3384
3385
/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* MOV Rd, TRx: read a test register (386/486 only); #UD on Pentium and
       later target CPUs. */
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
}
3397
3398
/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* MOV TRx, Rd: write a test register (386/486 only); #UD on Pentium and
       later target CPUs. */
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
}
3410
3411
3412/**
3413 * @opcode 0x28
3414 * @oppfx none
3415 * @opcpuid sse
3416 * @opgroup og_sse_simdfp_datamove
3417 * @opxcpttype 1
3418 * @optest op1=1 op2=2 -> op1=2
3419 * @optest op1=0 op2=-42 -> op1=-42
3420 */
3421FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3422{
3423 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3425 if (IEM_IS_MODRM_REG_MODE(bRm))
3426 {
3427 /*
3428 * Register, register.
3429 */
3430 IEM_MC_BEGIN(0, 0);
3431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3432 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3433 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3434 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3435 IEM_GET_MODRM_RM(pVCpu, bRm));
3436 IEM_MC_ADVANCE_RIP_AND_FINISH();
3437 IEM_MC_END();
3438 }
3439 else
3440 {
3441 /*
3442 * Register, memory.
3443 */
3444 IEM_MC_BEGIN(0, 2);
3445 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3447
3448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3450 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3451 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3452
3453 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3454 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3455
3456 IEM_MC_ADVANCE_RIP_AND_FINISH();
3457 IEM_MC_END();
3458 }
3459}
3460
3461/**
3462 * @opcode 0x28
3463 * @oppfx 66
3464 * @opcpuid sse2
3465 * @opgroup og_sse2_pcksclr_datamove
3466 * @opxcpttype 1
3467 * @optest op1=1 op2=2 -> op1=2
3468 * @optest op1=0 op2=-42 -> op1=-42
3469 */
3470FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3471{
3472 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3474 if (IEM_IS_MODRM_REG_MODE(bRm))
3475 {
3476 /*
3477 * Register, register.
3478 */
3479 IEM_MC_BEGIN(0, 0);
3480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3481 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3483 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3484 IEM_GET_MODRM_RM(pVCpu, bRm));
3485 IEM_MC_ADVANCE_RIP_AND_FINISH();
3486 IEM_MC_END();
3487 }
3488 else
3489 {
3490 /*
3491 * Register, memory.
3492 */
3493 IEM_MC_BEGIN(0, 2);
3494 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3496
3497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3499 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3500 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3501
3502 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3503 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3504
3505 IEM_MC_ADVANCE_RIP_AND_FINISH();
3506 IEM_MC_END();
3507 }
3508}
3509
3510/* Opcode 0xf3 0x0f 0x28 - invalid */
3511/* Opcode 0xf2 0x0f 0x28 - invalid */
3512
3513/**
3514 * @opcode 0x29
3515 * @oppfx none
3516 * @opcpuid sse
3517 * @opgroup og_sse_simdfp_datamove
3518 * @opxcpttype 1
3519 * @optest op1=1 op2=2 -> op1=2
3520 * @optest op1=0 op2=-42 -> op1=-42
3521 */
3522FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3523{
3524 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3526 if (IEM_IS_MODRM_REG_MODE(bRm))
3527 {
3528 /*
3529 * Register, register.
3530 */
3531 IEM_MC_BEGIN(0, 0);
3532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3533 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3535 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3536 IEM_GET_MODRM_REG(pVCpu, bRm));
3537 IEM_MC_ADVANCE_RIP_AND_FINISH();
3538 IEM_MC_END();
3539 }
3540 else
3541 {
3542 /*
3543 * Memory, register.
3544 */
3545 IEM_MC_BEGIN(0, 2);
3546 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3548
3549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3551 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3552 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3553
3554 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3555 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3556
3557 IEM_MC_ADVANCE_RIP_AND_FINISH();
3558 IEM_MC_END();
3559 }
3560}
3561
3562/**
3563 * @opcode 0x29
3564 * @oppfx 66
3565 * @opcpuid sse2
3566 * @opgroup og_sse2_pcksclr_datamove
3567 * @opxcpttype 1
3568 * @optest op1=1 op2=2 -> op1=2
3569 * @optest op1=0 op2=-42 -> op1=-42
3570 */
3571FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3572{
3573 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3574 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3575 if (IEM_IS_MODRM_REG_MODE(bRm))
3576 {
3577 /*
3578 * Register, register.
3579 */
3580 IEM_MC_BEGIN(0, 0);
3581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3582 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3583 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3584 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3585 IEM_GET_MODRM_REG(pVCpu, bRm));
3586 IEM_MC_ADVANCE_RIP_AND_FINISH();
3587 IEM_MC_END();
3588 }
3589 else
3590 {
3591 /*
3592 * Memory, register.
3593 */
3594 IEM_MC_BEGIN(0, 2);
3595 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3596 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3597
3598 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3600 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3601 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3602
3603 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3604 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3605
3606 IEM_MC_ADVANCE_RIP_AND_FINISH();
3607 IEM_MC_END();
3608 }
3609}
3610
3611/* Opcode 0xf3 0x0f 0x29 - invalid */
3612/* Opcode 0xf2 0x0f 0x29 - invalid */
3613
3614
3615/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3616FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3617{
3618 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3620 if (IEM_IS_MODRM_REG_MODE(bRm))
3621 {
3622 /*
3623 * XMM, MMX
3624 */
3625 IEM_MC_BEGIN(3, 1);
3626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3627 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3628 IEM_MC_LOCAL(X86XMMREG, Dst);
3629 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3630 IEM_MC_ARG(uint64_t, u64Src, 2);
3631 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3632 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3633 IEM_MC_PREPARE_FPU_USAGE();
3634 IEM_MC_FPU_TO_MMX_MODE();
3635
3636 IEM_MC_REF_MXCSR(pfMxcsr);
3637 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3638 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3639
3640 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3641 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3642 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3643 } IEM_MC_ELSE() {
3644 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3645 } IEM_MC_ENDIF();
3646
3647 IEM_MC_ADVANCE_RIP_AND_FINISH();
3648 IEM_MC_END();
3649 }
3650 else
3651 {
3652 /*
3653 * XMM, [mem64]
3654 */
3655 IEM_MC_BEGIN(3, 2);
3656 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3657 IEM_MC_LOCAL(X86XMMREG, Dst);
3658 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3659 IEM_MC_ARG(uint64_t, u64Src, 2);
3660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3661
3662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3664 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3665 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3666 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3667
3668 IEM_MC_PREPARE_FPU_USAGE();
3669 IEM_MC_FPU_TO_MMX_MODE();
3670 IEM_MC_REF_MXCSR(pfMxcsr);
3671
3672 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3673 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3674 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3675 } IEM_MC_ELSE() {
3676 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3677 } IEM_MC_ENDIF();
3678
3679 IEM_MC_ADVANCE_RIP_AND_FINISH();
3680 IEM_MC_END();
3681 }
3682}
3683
3684
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
{
    /* CVTPI2PD: convert two packed int32 values (MMX reg or m64) to two
       double-precision floats, filling the entire destination XMM register
       (hence no pre-fetch of the destination is needed, unlike cvtpi2ps). */
    IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, MMX
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        /* Reading an MMX register switches the FPU to MMX mode. */
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64]
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        /* Doesn't cause a transition to MMX mode. */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3752
3753
/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
{
    /* CVTSI2SS: convert a signed 32- or 64-bit integer (GPR or memory,
       selected by REX.W) to a single-precision float in the low dword of the
       destination XMM register (upper bits preserved via STORE_XREG_R32).
       The result is only stored when no MXCSR exception is pending. */
    IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG(const int64_t *, pi64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_LOCAL(int64_t, i64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG(const int32_t *, pi32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_LOCAL(int32_t, i32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
3877
3878
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
{
    /* CVTSI2SD: convert a signed 32- or 64-bit integer (GPR or memory,
       selected by REX.W) to a double-precision float in the low quadword of
       the destination XMM register (upper bits preserved via STORE_XREG_R64).
       The result is only stored when no MXCSR exception is pending. */
    IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG(const int64_t *, pi64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_LOCAL(int64_t, i64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG(const int32_t *, pi32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_LOCAL(int32_t, i32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4002
4003
4004/**
4005 * @opcode 0x2b
4006 * @opcodesub !11 mr/reg
4007 * @oppfx none
4008 * @opcpuid sse
4009 * @opgroup og_sse1_cachect
4010 * @opxcpttype 1
4011 * @optest op1=1 op2=2 -> op1=2
4012 * @optest op1=0 op2=-42 -> op1=-42
4013 */
4014FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4015{
4016 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4017 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4018 if (IEM_IS_MODRM_MEM_MODE(bRm))
4019 {
4020 /*
4021 * memory, register.
4022 */
4023 IEM_MC_BEGIN(0, 2);
4024 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4026
4027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4029 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4030 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4031
4032 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4033 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4034
4035 IEM_MC_ADVANCE_RIP_AND_FINISH();
4036 IEM_MC_END();
4037 }
4038 /* The register, register encoding is invalid. */
4039 else
4040 IEMOP_RAISE_INVALID_OPCODE_RET();
4041}
4042
4043/**
4044 * @opcode 0x2b
4045 * @opcodesub !11 mr/reg
4046 * @oppfx 0x66
4047 * @opcpuid sse2
4048 * @opgroup og_sse2_cachect
4049 * @opxcpttype 1
4050 * @optest op1=1 op2=2 -> op1=2
4051 * @optest op1=0 op2=-42 -> op1=-42
4052 */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Finish decoding (effective address + no LOCK), then do the SSE2 checks. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Copy XMM reg to memory as an aligned 128-bit store.
           NOTE(review): the non-temporal cache hint appears to have no effect in
           this emulation - confirm that is intentional. */
        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
4081/* Opcode 0xf3 0x0f 0x2b - invalid */
4082/* Opcode 0xf2 0x0f 0x2b - invalid */
4083
4084
4085/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4086FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4087{
4088 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4089 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4090 if (IEM_IS_MODRM_REG_MODE(bRm))
4091 {
4092 /*
4093 * Register, register.
4094 */
4095 IEM_MC_BEGIN(3, 1);
4096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4097 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4098 IEM_MC_LOCAL(uint64_t, u64Dst);
4099 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4100 IEM_MC_ARG(uint64_t, u64Src, 2);
4101 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4102 IEM_MC_PREPARE_FPU_USAGE();
4103 IEM_MC_FPU_TO_MMX_MODE();
4104
4105 IEM_MC_REF_MXCSR(pfMxcsr);
4106 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4107
4108 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4109 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4110 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4111 } IEM_MC_ELSE() {
4112 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4113 } IEM_MC_ENDIF();
4114
4115 IEM_MC_ADVANCE_RIP_AND_FINISH();
4116 IEM_MC_END();
4117 }
4118 else
4119 {
4120 /*
4121 * Register, memory.
4122 */
4123 IEM_MC_BEGIN(3, 2);
4124 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4125 IEM_MC_LOCAL(uint64_t, u64Dst);
4126 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4127 IEM_MC_ARG(uint64_t, u64Src, 2);
4128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4129
4130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4132 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4133 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4134
4135 IEM_MC_PREPARE_FPU_USAGE();
4136 IEM_MC_FPU_TO_MMX_MODE();
4137 IEM_MC_REF_MXCSR(pfMxcsr);
4138
4139 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4140 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4141 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4142 } IEM_MC_ELSE() {
4143 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4144 } IEM_MC_ENDIF();
4145
4146 IEM_MC_ADVANCE_RIP_AND_FINISH();
4147 IEM_MC_END();
4148 }
4149}
4150
4151
4152/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* Result goes to an MMX register, so enter MMX mode. */

        IEM_MC_REF_MXCSR(pfMxcsr);
        /* Source is the full 128-bit XMM register (two doubles), passed by const reference. */
        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        /* Convert (with truncation); only commit the result when no SIMD FP exception is pending. */
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Memory operand is a full, alignment-checked 128-bit load. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* Result goes to an MMX register, so enter MMX mode. */

        IEM_MC_REF_MXCSR(pfMxcsr);

        /* Convert (with truncation); only commit the result when no SIMD FP exception is pending. */
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4219
4220
4221/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
{
    /* NOTE(review): second operand in the mnemonic reads Wsd; presumably it should
       be Wss for the single-precision form - verify against the mnemonic tables. */
    IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects the 64-bit destination forms. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint32_t *, pu32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            /* Only the low dword (scalar single) of the XMM register is used. */
            IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Commit the result only when no SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Commit the result only when no SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint32_t *, pu32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Commit the result only when no SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg, [mem] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Commit the result only when no SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4344
4345
4346/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4347FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4348{
4349 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4350
4351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4352 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4353 {
4354 if (IEM_IS_MODRM_REG_MODE(bRm))
4355 {
4356 /* greg64, XMM */
4357 IEM_MC_BEGIN(3, 2);
4358 IEM_MC_LOCAL(uint32_t, fMxcsr);
4359 IEM_MC_LOCAL(int64_t, i64Dst);
4360 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4361 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4362 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4363
4364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4365 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4366 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4367
4368 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4369 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4370 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4371 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4372 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4373 } IEM_MC_ELSE() {
4374 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4375 } IEM_MC_ENDIF();
4376
4377 IEM_MC_ADVANCE_RIP_AND_FINISH();
4378 IEM_MC_END();
4379 }
4380 else
4381 {
4382 /* greg64, [mem64] */
4383 IEM_MC_BEGIN(3, 4);
4384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4385 IEM_MC_LOCAL(uint32_t, fMxcsr);
4386 IEM_MC_LOCAL(int64_t, i64Dst);
4387 IEM_MC_LOCAL(uint64_t, u64Src);
4388 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4389 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4390 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4391
4392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4394 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4395 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4396
4397 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4398 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4399 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4400 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4401 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4402 } IEM_MC_ELSE() {
4403 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4404 } IEM_MC_ENDIF();
4405
4406 IEM_MC_ADVANCE_RIP_AND_FINISH();
4407 IEM_MC_END();
4408 }
4409 }
4410 else
4411 {
4412 if (IEM_IS_MODRM_REG_MODE(bRm))
4413 {
4414 /* greg, XMM */
4415 IEM_MC_BEGIN(3, 2);
4416 IEM_MC_LOCAL(uint32_t, fMxcsr);
4417 IEM_MC_LOCAL(int32_t, i32Dst);
4418 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4419 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4420 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4421
4422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4423 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4424 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4425
4426 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4427 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4428 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4429 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4430 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4431 } IEM_MC_ELSE() {
4432 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4433 } IEM_MC_ENDIF();
4434
4435 IEM_MC_ADVANCE_RIP_AND_FINISH();
4436 IEM_MC_END();
4437 }
4438 else
4439 {
4440 /* greg32, [mem32] */
4441 IEM_MC_BEGIN(3, 4);
4442 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4443 IEM_MC_LOCAL(uint32_t, fMxcsr);
4444 IEM_MC_LOCAL(int32_t, i32Dst);
4445 IEM_MC_LOCAL(uint64_t, u64Src);
4446 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4447 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4448 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4449
4450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4452 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4453 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4454
4455 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4456 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4457 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4458 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4459 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4460 } IEM_MC_ELSE() {
4461 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4462 } IEM_MC_ENDIF();
4463
4464 IEM_MC_ADVANCE_RIP_AND_FINISH();
4465 IEM_MC_END();
4466 }
4467 }
4468}
4469
4470
4471/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4472FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4473{
4474 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4476 if (IEM_IS_MODRM_REG_MODE(bRm))
4477 {
4478 /*
4479 * Register, register.
4480 */
4481 IEM_MC_BEGIN(3, 1);
4482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4483 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4484 IEM_MC_LOCAL(uint64_t, u64Dst);
4485 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4486 IEM_MC_ARG(uint64_t, u64Src, 2);
4487
4488 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4489 IEM_MC_PREPARE_FPU_USAGE();
4490 IEM_MC_FPU_TO_MMX_MODE();
4491
4492 IEM_MC_REF_MXCSR(pfMxcsr);
4493 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4494
4495 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4496 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4497 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4498 } IEM_MC_ELSE() {
4499 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4500 } IEM_MC_ENDIF();
4501
4502 IEM_MC_ADVANCE_RIP_AND_FINISH();
4503 IEM_MC_END();
4504 }
4505 else
4506 {
4507 /*
4508 * Register, memory.
4509 */
4510 IEM_MC_BEGIN(3, 2);
4511 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4512 IEM_MC_LOCAL(uint64_t, u64Dst);
4513 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4514 IEM_MC_ARG(uint64_t, u64Src, 2);
4515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4516
4517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4519 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4520 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4521
4522 IEM_MC_PREPARE_FPU_USAGE();
4523 IEM_MC_FPU_TO_MMX_MODE();
4524 IEM_MC_REF_MXCSR(pfMxcsr);
4525
4526 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4527 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4528 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4529 } IEM_MC_ELSE() {
4530 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4531 } IEM_MC_ENDIF();
4532
4533 IEM_MC_ADVANCE_RIP_AND_FINISH();
4534 IEM_MC_END();
4535 }
4536}
4537
4538
4539/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 2);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* Result goes to an MMX register, so enter MMX mode. */

        IEM_MC_REF_MXCSR(pfMxcsr);
        /* Source is the full 128-bit XMM register (two doubles), passed by const reference. */
        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        /* Convert (rounding per MXCSR); only commit the result when no SIMD FP exception is pending. */
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Memory operand is a full, alignment-checked 128-bit load. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* Result goes to an MMX register, so enter MMX mode. */

        IEM_MC_REF_MXCSR(pfMxcsr);

        /* Convert (rounding per MXCSR); only commit the result when no SIMD FP exception is pending. */
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4607
4608
4609/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
{
    /* NOTE(review): second operand in the mnemonic reads Wsd; presumably it should
       be Wss for the single-precision form - verify against the mnemonic tables. */
    IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects the 64-bit destination forms. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint32_t *, pu32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            /* Only the low dword (scalar single) of the XMM register is used. */
            IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Commit the result only when no SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Commit the result only when no SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint32_t *, pu32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Commit the result only when no SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg, [mem] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Commit the result only when no SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4732
4733
4734/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects the 64-bit destination forms. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            /* Only the low qword (scalar double) of the XMM register is used. */
            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Commit the result only when no SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Commit the result only when no SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Commit the result only when no SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg32, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Commit the result only when no SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4857
4858
4859/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(4, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* The comparison result is reported via EFLAGS; work on a local copy. */
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit EFLAGS only when no SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Scalar single: only a 32-bit memory operand is fetched (into dword 0). */
        IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        /* The comparison result is reported via EFLAGS; work on a local copy. */
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit EFLAGS only when no SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4926
4927
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd
 *
 * Unordered scalar double compare: sets ZF/PF/CF from the comparison of the
 * low double in Vsd with the low double in Wsd (register or 64-bit memory).
 * May raise a SIMD FP exception via MXCSR; EFLAGS are only committed when no
 * exception is pending. */
FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(4, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit EFLAGS only if the helper left no SIMD FP exception pending in MXCSR. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only the low qword is fetched from memory; the upper lane of uSrc2 is unused. */
        IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4995
4996
4997/* Opcode 0xf3 0x0f 0x2e - invalid */
4998/* Opcode 0xf2 0x0f 0x2e - invalid */
4999
5000
/** Opcode 0x0f 0x2f - comiss Vss, Wss
 *
 * Ordered scalar single compare: sets ZF/PF/CF from the comparison of the low
 * float in Vss with the low float in Wss (register or 32-bit memory).
 * Structure matches ucomiss; only the assembly helper differs. */
FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(4, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit EFLAGS only if the helper left no SIMD FP exception pending in MXCSR. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only the low dword is fetched from memory; the rest of uSrc2 is unused. */
        IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5068
5069
/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd
 *
 * Ordered scalar double compare: sets ZF/PF/CF from the comparison of the low
 * double in Vsd with the low double in Wsd (register or 64-bit memory).
 * Structure matches ucomisd; only the assembly helper differs. */
FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(4, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit EFLAGS only if the helper left no SIMD FP exception pending in MXCSR. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only the low qword is fetched from memory; the upper lane of uSrc2 is unused. */
        IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5137
5138
5139/* Opcode 0xf3 0x0f 0x2f - invalid */
5140/* Opcode 0xf2 0x0f 0x2f - invalid */
5141
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Full C implementation; may cause a VM-exit. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdmsr);
}


/** Opcode 0x0f 0x33. */
FNIEMOP_DEF(iemOp_rdpmc)
{
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdpmc);
}


/** Opcode 0x0f 0x34. */
FNIEMOP_DEF(iemOp_sysenter)
{
    IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Far indirect branch that changes mode and flags; ends the current translation block. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_sysenter);
}

/** Opcode 0x0f 0x35. */
FNIEMOP_DEF(iemOp_sysexit)
{
    IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
}

/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);


/** Opcode 0x0f 0x38. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    /* Dispatch on the third opcode byte; four table entries per opcode, one for
       each mandatory-prefix variant (none/0x66/0xf3/0xf2) selected by idxPrefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/** Opcode 0x0f 0x3a. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    /* Same dispatch scheme as the 0x0f 0x38 escape, but via the 0f3a table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
5226
5227
/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Emits both the register and memory source forms for all three effective
 * operand sizes.  Notes visible in the expansion below:
 *   - For the 32-bit operand size the high half of the 64-bit destination is
 *     cleared even when the condition is false (x86-64 zero-extension rule).
 *   - For the memory forms the source operand is always fetched, regardless
 *     of the condition, so memory faults are raised unconditionally.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); /* zero-extend even if cond is false */ \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* fetched unconditionally */ \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); /* zero-extend even if cond is false */ \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
5334
5335
5336
/*
 * CMOVcc Gv,Ev family (0x0f 0x40..0x4f).  Each wrapper only supplies the
 * EFLAGS condition test; CMOV_X above emits the actual decode/execute code.
 */

/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    /* below-or-equal: CF=1 or ZF=1. */
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    /* less: SF != OF. */
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    /* less-or-equal: ZF=1 or SF != OF. */
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
5465
/** Opcode 0x0f 0x50 - movmskps Gy, Ups
 *
 * Extracts the sign bits of the packed singles in the XMM source into the
 * low bits of the destination GPR (stored as a 32-bit write, which
 * zero-extends in 64-bit mode).  Register form only. */
FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
{
    IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(2, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint8_t, u8Dst);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
5493
5494
5495/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5496FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5497{
5498 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5500 if (IEM_IS_MODRM_REG_MODE(bRm))
5501 {
5502 /*
5503 * Register, register.
5504 */
5505 IEM_MC_BEGIN(2, 1);
5506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5507 IEM_MC_LOCAL(uint8_t, u8Dst);
5508 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5509 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5510 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5511 IEM_MC_PREPARE_SSE_USAGE();
5512 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5513 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5514 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG_8(bRm), u8Dst);
5515 IEM_MC_ADVANCE_RIP_AND_FINISH();
5516 IEM_MC_END();
5517 }
5518 /* No memory operand. */
5519 else
5520 IEMOP_RAISE_INVALID_OPCODE_RET();
5521
5522}
5523
5524
5525/* Opcode 0xf3 0x0f 0x50 - invalid */
5526/* Opcode 0xf2 0x0f 0x50 - invalid */
5527
5528
/*
 * 0x0f 0x51..0x53: SSE/SSE2 sqrt, reciprocal-sqrt and reciprocal wrappers.
 * Each defers to a shared full/scalar worker with the matching assembly helper.
 */

/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
}


/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
}


/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
}


/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
}


/* Opcode 0x66 0x0f 0x52 - invalid */


/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
}


/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - rcpps Vps, Wps (not implemented yet) */
FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss (not implemented yet) */
FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */
5588
5589
/*
 * 0x0f 0x54..0x57: packed logical ops.  The FP mnemonic variants share the
 * integer bitwise helpers (pand/pandn/por/pxor) since the operation is
 * bit-pattern identical; only the CPUID check (SSE vs SSE2) differs.
 */

/** Opcode 0x0f 0x54 - andps Vps, Wps */
FNIEMOP_DEF(iemOp_andps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
}


/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}


/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */


/** Opcode 0x0f 0x55 - andnps Vps, Wps */
FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
}


/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}


/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */


/** Opcode 0x0f 0x56 - orps Vps, Wps */
FNIEMOP_DEF(iemOp_orps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
}


/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */


/** Opcode 0x0f 0x57 - xorps Vps, Wps */
FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
}


/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}


/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */
5668
/* 0x0f 0x58/0x59: packed/scalar FP add and multiply wrappers. */

/** Opcode 0x0f 0x58 - addps Vps, Wps */
FNIEMOP_DEF(iemOp_addps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
}


/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
}


/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
FNIEMOP_DEF(iemOp_addss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
}


/** Opcode 0x0f 0x59 - mulps Vps, Wps */
FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
}


/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
}


/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
}
5731
5732
/* 0x0f 0x5a/0x5b: FP/integer conversion wrappers. */

/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
}


/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
}


/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
{
    IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
}


/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
}


/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
}


/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
}


/* Opcode 0xf2 0x0f 0x5b - invalid */
5790
5791
/* 0x0f 0x5c/0x5d: packed/scalar FP subtract and minimum wrappers. */

/** Opcode 0x0f 0x5c - subps Vps, Wps */
FNIEMOP_DEF(iemOp_subps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
}


/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
}


/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
FNIEMOP_DEF(iemOp_subss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
}


/** Opcode 0x0f 0x5d - minps Vps, Wps */
FNIEMOP_DEF(iemOp_minps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
}


/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
}


/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
FNIEMOP_DEF(iemOp_minss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
}
5854
5855
/* 0x0f 0x5e/0x5f: packed/scalar FP divide and maximum wrappers. */

/** Opcode 0x0f 0x5e - divps Vps, Wps */
FNIEMOP_DEF(iemOp_divps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
}


/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
}


/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
FNIEMOP_DEF(iemOp_divss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
}


/** Opcode 0x0f 0x5f - maxps Vps, Wps */
FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
}


/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
}


/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
}
5918
5919
5920/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    /* MMX byte interleave (low halves); common worker widens the low dword pair. */
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
}
5926
5927
/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    /* SSE2 byte interleave of the low 64-bit halves. */
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
}
5934
5935
5936/* Opcode 0xf3 0x0f 0x60 - invalid */
5937
5938
5939/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    /* MMX word interleave of the low halves. */
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
}
5946
5947
5948/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    /* SSE2 word interleave of the low 64-bit halves. */
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
}
5954
5955
5956/* Opcode 0xf3 0x0f 0x61 - invalid */
5957
5958
5959/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    /* MMX dword interleave of the low halves. */
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
}
5965
5966
5967/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    /* SSE2 dword interleave of the low 64-bit halves. */
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
}
5973
5974
5975/* Opcode 0xf3 0x0f 0x62 - invalid */
5976
5977
5978
5979/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
{
    /* NOTE(review): the header comment above says "Pq, Qq" but the mnemonic
       records Qd - cf. the Qd/Qq remark at the 0x68 punpckhbw handler; confirm
       which operand form is intended. */
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
}
5985
5986
5987/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
{
    /* SSE2 signed word -> signed-saturated byte pack. */
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
}
5993
5994
5995/* Opcode 0xf3 0x0f 0x63 - invalid */
5996
5997
5998/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
{
    /* NOTE(review): unlike the sibling MMX ops here, the pcmpgt* mnemonics omit
       DISOPTYPE_X86_MMX and pass IEMOPHINT_IGNORES_OP_SIZES - confirm intended. */
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
}
6004
6005
6006/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
{
    /* NOTE(review): the pcmpgt* mnemonics omit DISOPTYPE_X86_SSE unlike the
       neighbouring SSE2 ops - confirm intended. */
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
}
6012
6013
6014/* Opcode 0xf3 0x0f 0x64 - invalid */
6015
6016
6017/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
{
    /* MMX signed word greater-than compare (all-ones/all-zeros per lane). */
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
}
6023
6024
6025/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
{
    /* SSE2 signed word greater-than compare. */
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
}
6031
6032
6033/* Opcode 0xf3 0x0f 0x65 - invalid */
6034
6035
6036/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
{
    /* MMX signed dword greater-than compare. */
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
}
6042
6043
6044/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
{
    /* SSE2 signed dword greater-than compare. */
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
}
6050
6051
6052/* Opcode 0xf3 0x0f 0x66 - invalid */
6053
6054
6055/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
{
    /* NOTE(review): header comment says Qq, mnemonic records Qd - cf. the Qd/Qq
       remark at the 0x68 punpckhbw handler; confirm intended operand form. */
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
}
6061
6062
6063/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
{
    /* SSE2 signed word -> unsigned-saturated byte pack. */
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
}
6069
6070
6071/* Opcode 0xf3 0x0f 0x67 - invalid */
6072
6073
6074/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6075 * @note Intel and AMD both uses Qd for the second parameter, however they
6076 * both list it as a mmX/mem64 operand and intel describes it as being
6077 * loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
{
    /* MMX byte interleave of the high halves. */
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}
6083
6084
6085/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    /* SSE2 byte interleave of the high 64-bit halves. */
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
}
6091
6092
6093/* Opcode 0xf3 0x0f 0x68 - invalid */
6094
6095
6096/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6097 * @note Intel and AMD both uses Qd for the second parameter, however they
6098 * both list it as a mmX/mem64 operand and intel describes it as being
6099 * loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
{
    /* MMX word interleave of the high halves. */
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
}
6105
6106
6107/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Hx, Wx */
6108FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6109{
6110 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6111 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6112
6113}
6114
6115
6116/* Opcode 0xf3 0x0f 0x69 - invalid */
6117
6118
6119/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6120 * @note Intel and AMD both uses Qd for the second parameter, however they
6121 * both list it as a mmX/mem64 operand and intel describes it as being
6122 * loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
{
    /* MMX dword interleave of the high halves. */
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
}
6128
6129
6130/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
{
    /* SSE2 dword interleave of the high 64-bit halves. */
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
}
6136
6137
6138/* Opcode 0xf3 0x0f 0x6a - invalid */
6139
6140
6141/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
{
    /* MMX signed dword -> signed-saturated word pack. */
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
}
6147
6148
6149/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
{
    /* SSE2 signed dword -> signed-saturated word pack. */
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
}
6155
6156
6157/* Opcode 0xf3 0x0f 0x6b - invalid */
6158
6159
6160/* Opcode 0x0f 0x6c - invalid */
6161
6162
6163/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    /* SSE2-only: qword interleave of the low halves (no MMX counterpart). */
    IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
}
6169
6170
6171/* Opcode 0xf3 0x0f 0x6c - invalid */
6172/* Opcode 0xf2 0x0f 0x6c - invalid */
6173
6174
6175/* Opcode 0x0f 0x6d - invalid */
6176
6177
6178/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
{
    /* SSE2-only: qword interleave of the high halves (no MMX counterpart). */
    IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
}
6184
6185
6186/* Opcode 0xf3 0x0f 0x6d - invalid */
6187
6188
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    /* 0x0f 0x6e: MOVD/MOVQ mm, r/m - REX.W selects the 64-bit (movq) form,
       otherwise the 32-bit source is zero-extended into the MMX register. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg64 */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE(); /* movd/movq puts the FPU into MMX mode (see ftw=0xff in tests above). */

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg32 */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp); /* zero-extend to 64 bits */

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
6295
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    /* 0x66 0x0f 0x6e: MOVD/MOVQ xmm, r/m - REX.W selects the 64-bit (movq)
       form; either way the value is zero-extended to the full 128 bits. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
6398
6399/* Opcode 0xf3 0x0f 0x6e - invalid */
6400
6401
6402/**
6403 * @opcode 0x6f
6404 * @oppfx none
6405 * @opcpuid mmx
6406 * @opgroup og_mmx_datamove
6407 * @opxcpttype 5
6408 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6409 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6410 */
6411FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6412{
6413 IEMOP_MNEMONIC2(RM, MOVD, movd, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6415 if (IEM_IS_MODRM_REG_MODE(bRm))
6416 {
6417 /*
6418 * Register, register.
6419 */
6420 IEM_MC_BEGIN(0, 1);
6421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6422 IEM_MC_LOCAL(uint64_t, u64Tmp);
6423
6424 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6425 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6426 IEM_MC_FPU_TO_MMX_MODE();
6427
6428 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6429 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6430
6431 IEM_MC_ADVANCE_RIP_AND_FINISH();
6432 IEM_MC_END();
6433 }
6434 else
6435 {
6436 /*
6437 * Register, memory.
6438 */
6439 IEM_MC_BEGIN(0, 2);
6440 IEM_MC_LOCAL(uint64_t, u64Tmp);
6441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6442
6443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6445 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6446 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6447 IEM_MC_FPU_TO_MMX_MODE();
6448
6449 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6450 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6451
6452 IEM_MC_ADVANCE_RIP_AND_FINISH();
6453 IEM_MC_END();
6454 }
6455}
6456
6457/**
6458 * @opcode 0x6f
6459 * @oppfx 0x66
6460 * @opcpuid sse2
6461 * @opgroup og_sse2_simdint_datamove
6462 * @opxcpttype 1
6463 * @optest op1=1 op2=2 -> op1=2
6464 * @optest op1=0 op2=-42 -> op1=-42
6465 */
FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
{
    /* MOVDQA xmm, xmm/mem128: full 128-bit load/copy; the memory form uses
       the aligned fetch (IEM_MC_FETCH_MEM_U128_ALIGN_SSE). */
    IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6507
6508/**
6509 * @opcode 0x6f
6510 * @oppfx 0xf3
6511 * @opcpuid sse2
6512 * @opgroup og_sse2_simdint_datamove
6513 * @opxcpttype 4UA
6514 * @optest op1=1 op2=2 -> op1=2
6515 * @optest op1=0 op2=-42 -> op1=-42
6516 */
FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
{
    /* MOVDQU xmm, xmm/mem128: like movdqa but the memory form uses the
       unaligned fetch (plain IEM_MC_FETCH_MEM_U128). */
    IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6555
6556
6557/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    /* PSHUFW mm, mm/mem64, imm8 - requires SSE or the AMD MMX extensions. */
    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The 1 accounts for the trailing imm8 byte that is fetched next. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6614
6615
6616/**
6617 * Common worker for SSE2 instructions on the forms:
6618 * pshufd xmm1, xmm2/mem128, imm8
6619 * pshufhw xmm1, xmm2/mem128, imm8
6620 * pshuflw xmm1, xmm2/mem128, imm8
6621 *
6622 * Proper alignment of the 128-bit operand is enforced.
6623 * Exceptions type 4. SSE2 cpuid checks.
6624 */
FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
{
    /* Shared decode/dispatch for pshufd/pshufhw/pshuflw xmm, xmm/mem128, imm8.
       pfnWorker is the assembly/IEM implementation doing the actual shuffle. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The 1 accounts for the trailing imm8 byte that is fetched next. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6673
6674
6675/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    /* Dword shuffle; shares decoding with pshufhw/pshuflw. */
    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
}
6681
6682
6683/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    /* High-qword word shuffle; shares decoding with pshufd/pshuflw. */
    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
}
6689
6690
6691/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    /* Low-qword word shuffle; shares decoding with pshufd/pshufhw. */
    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
}
6697
6698
6699/**
6700 * Common worker for MMX instructions of the form:
6701 * psrlw mm, imm8
6702 * psraw mm, imm8
6703 * psllw mm, imm8
6704 * psrld mm, imm8
6705 * psrad mm, imm8
6706 * pslld mm, imm8
6707 * psrlq mm, imm8
6708 * psllq mm, imm8
6709 *
6710 */
FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
{
    /* Shared worker for the group 12/13/14 MMX shift-by-imm8 encodings.
       Note: the destination MMX register is the r/m field (group opcode). */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory not supported.
         */
        /// @todo Caller already enforced register mode?!
        AssertFailedReturn(VINF_SUCCESS);
    }
}
6743
6744
6745/**
6746 * Common worker for SSE2 instructions of the form:
6747 * psrlw xmm, imm8
6748 * psraw xmm, imm8
6749 * psllw xmm, imm8
6750 * psrld xmm, imm8
6751 * psrad xmm, imm8
6752 * pslld xmm, imm8
6753 * psrlq xmm, imm8
6754 * psllq xmm, imm8
6755 *
6756 */
FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
{
    /* Shared worker for the group 12/13/14 SSE2 shift-by-imm8 encodings.
       Note: the destination XMM register is the r/m field (group opcode). */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        /// @todo Caller already enforced register mode?!
        AssertFailedReturn(VINF_SUCCESS);
    }
}
6785
6786
6787/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
{
    /* MMX logical word right shift by imm8. */
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
}
6793
6794
6795/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
{
    /* SSE2 logical word right shift by imm8. */
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
}
6801
6802
6803/** Opcode 0x0f 0x71 11/4. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
{
    /* MMX arithmetic word right shift by imm8. */
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
}
6809
6810
6811/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
{
    /* SSE2 arithmetic word right shift by imm8. */
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
}
6817
6818
6819/** Opcode 0x0f 0x71 11/6. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
{
    /* MMX word left shift by imm8. */
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
}
6825
6826
6827/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
{
    /* SSE2 word left shift by imm8. */
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
}
6833
6834
6835/**
6836 * Group 12 jump table for register variant.
6837 */
/**
 * Group 12 jump table for register variant.
 *
 * Rows are the /r reg field; the four columns per row are indexed by
 * pVCpu->iem.s.idxPrefix (operand prefix variant; column 0 is the
 * unprefixed MMX form (Nq), column 1 the 0x66 SSE form (Ux) - see the
 * opcode comments on the individual handlers).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6850
6851
6852/** Opcode 0x0f 0x71. */
/** Opcode 0x0f 0x71. */
FNIEMOP_DEF(iemOp_Grp12)
{
    /* Group 12: only mod=3 (register) encodings exist; memory forms are
       invalid but must still consume the trailing imm8 (hence NeedImm8). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6862
6863
6864/** Opcode 0x0f 0x72 11/2. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
    /* MMX logical dword right shift by imm8. */
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}
6870
6871
6872/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
    /* SSE2 logical dword right shift by imm8. */
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
}
6878
6879
6880/** Opcode 0x0f 0x72 11/4. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
    /* MMX arithmetic dword right shift by imm8. */
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}
6886
6887
6888/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
    /* SSE2 arithmetic dword right shift by imm8. */
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
}
6894
6895
6896/** Opcode 0x0f 0x72 11/6. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
    /* MMX dword left shift by imm8. */
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}
6902
6903/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
    /* SSE2 dword left shift by imm8. */
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
}
6909
6910
6911/**
6912 * Group 13 jump table for register variant.
6913 */
/**
 * Group 13 jump table for register variant.
 *
 * Same layout as g_apfnGroup12RegReg: rows are the /r reg field, the four
 * columns per row are indexed by pVCpu->iem.s.idxPrefix (column 0 the
 * unprefixed MMX form, column 1 the 0x66 SSE form).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6926
/** Opcode 0x0f 0x72 - group 13 (dword shifts by immediate).
 *  Register forms dispatch through g_apfnGroup13RegReg by modrm.reg and the
 *  active mandatory prefix; the memory form of this group is always invalid
 *  (but still needs the imm8 consumed for correct decoding). */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6937
6938
/** Opcode 0x0f 0x73 11/2 - PSRLQ mm, imm8 (packed shift right logical qword,
 *  MMX form).  bRm comes from the group 14 dispatcher. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}
6945
6946
/** Opcode 0x66 0x0f 0x73 11/2 - PSRLQ xmm, imm8 (packed shift right logical
 *  qword, SSE2 form).  bRm comes from the group 14 dispatcher. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
}
6953
6954
/** Opcode 0x66 0x0f 0x73 11/3 - PSRLDQ xmm, imm8 (whole-register byte shift
 *  right; SSE2 only, no MMX counterpart - hence no /3 Nq variant). */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
}
6961
6962
/** Opcode 0x0f 0x73 11/6 - PSLLQ mm, imm8 (packed shift left logical qword,
 *  MMX form).  bRm comes from the group 14 dispatcher. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}
6969
6970
/** Opcode 0x66 0x0f 0x73 11/6 - PSLLQ xmm, imm8 (packed shift left logical
 *  qword, SSE2 form).  bRm comes from the group 14 dispatcher. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
}
6977
6978
/** Opcode 0x66 0x0f 0x73 11/7 - PSLLDQ xmm, imm8 (whole-register byte shift
 *  left; SSE2 only, no MMX counterpart - hence no /7 Nq variant). */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
}
6985
6986/**
6987 * Group 14 jump table for register variant.
6988 */
6989IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6990{
6991 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6992 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6993 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6994 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6995 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6996 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6997 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6998 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6999};
7000AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
7001
7002
/** Opcode 0x0f 0x73 - group 14 (qword/whole-register shifts by immediate).
 *  Register forms dispatch through g_apfnGroup14RegReg by modrm.reg and the
 *  active mandatory prefix; the memory form of this group is always invalid
 *  (but still needs the imm8 consumed for correct decoding). */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
7013
7014
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq
 *  Packed byte equality compare, MMX form; decode and dispatch via the common
 *  full/full MMX worker. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}
7021
7022
/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx
 *  Packed byte equality compare, SSE2 form; decode and dispatch via the
 *  common full/full SSE2 worker. */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}
7029
7030
7031/* Opcode 0xf3 0x0f 0x74 - invalid */
7032/* Opcode 0xf2 0x0f 0x74 - invalid */
7033
7034
/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq
 *  Packed word equality compare, MMX form. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}
7041
7042
/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx
 *  Packed word equality compare, SSE2 form. */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
}
7049
7050
7051/* Opcode 0xf3 0x0f 0x75 - invalid */
7052/* Opcode 0xf2 0x0f 0x75 - invalid */
7053
7054
/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq
 *  Packed dword equality compare, MMX form. */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
}
7061
7062
/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx
 *  Packed dword equality compare, SSE2 form. */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
}
7069
7070
7071/* Opcode 0xf3 0x0f 0x76 - invalid */
7072/* Opcode 0xf2 0x0f 0x76 - invalid */
7073
7074
/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here)
 *  Empty MMX state: after the usual device-not-available and pending-FPU
 *  exception checks, switches the FPU state out of MMX mode. */
FNIEMOP_DEF(iemOp_emms)
{
    IEMOP_MNEMONIC(emms, "emms");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();  /* may raise \#NM */
    IEM_MC_MAYBE_RAISE_FPU_XCPT();              /* may raise \#MF on a pending x87 exception */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();                 /* the actual effect of EMMS */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7088
7089/* Opcode 0x66 0x0f 0x77 - invalid */
7090/* Opcode 0xf3 0x0f 0x77 - invalid */
7091/* Opcode 0xf2 0x0f 0x77 - invalid */
7092
/** Opcode 0x0f 0x78 - VMREAD Ey, Gy
 *  Reads the VMCS field whose encoding is in Gy into the register or memory
 *  destination Ey.  Effective operand size is forced to 64-bit in long mode
 *  and 32-bit otherwise (no 16-bit form).  The actual read and the VM-exit /
 *  status-flag handling happen in the iemCImpl_vmread_* workers. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread); /* must be in VMX operation */
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t *,          pu64Dst, 0);
            IEM_MC_ARG(uint64_t,            u64Enc,  1);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            /* Note: destination is still referenced as a 64-bit register; the
               32-bit worker deals with the narrower write. */
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t *,          pu64Dst, 0);
            IEM_MC_ARG(uint32_t,            u32Enc,  1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg32, pu64Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,             iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,             GCPtrVal, 1);
            IEM_MC_ARG(uint64_t,            u64Enc,   2);
            /* Effective address must be calculated before decoding is flagged done. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,             iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,             GCPtrVal, 1);
            IEM_MC_ARG(uint32_t,            u32Enc,   2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
#endif
7169
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 - not implemented yet (decoder stub). */
FNIEMOP_STUB(iemOp_AmdGrp17);
7172/* Opcode 0xf3 0x0f 0x78 - invalid */
7173/* Opcode 0xf2 0x0f 0x78 - invalid */
7174
/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey
 *  Writes the register or memory source Ey into the VMCS field whose encoding
 *  is in Gy.  Effective operand size is forced to 64-bit in long mode and
 *  32-bit otherwise.  The actual write and the VM-exit / status-flag handling
 *  happen in the iemCImpl_vmwrite_* workers (the reg worker takes a 64-bit
 *  value for both widths; the 32-bit value is zero-extended by the fetch). */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite); /* must be in VMX operation */
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t,            u64Val, 0);
            IEM_MC_ARG(uint64_t,            u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint32_t,            u32Val, 0);
            IEM_MC_ARG(uint32_t,            u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,             iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,             GCPtrVal, 1);
            IEM_MC_ARG(uint64_t,            u64Enc,   2);
            /* Effective address must be calculated before decoding is flagged done. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,             iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,             GCPtrVal, 1);
            IEM_MC_ARG(uint32_t,            u32Enc,   2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
#endif
7251/* Opcode 0x66 0x0f 0x79 - invalid */
7252/* Opcode 0xf3 0x0f 0x79 - invalid */
7253/* Opcode 0xf2 0x0f 0x79 - invalid */
7254
7255/* Opcode 0x0f 0x7a - invalid */
7256/* Opcode 0x66 0x0f 0x7a - invalid */
7257/* Opcode 0xf3 0x0f 0x7a - invalid */
7258/* Opcode 0xf2 0x0f 0x7a - invalid */
7259
7260/* Opcode 0x0f 0x7b - invalid */
7261/* Opcode 0x66 0x0f 0x7b - invalid */
7262/* Opcode 0xf3 0x0f 0x7b - invalid */
7263/* Opcode 0xf2 0x0f 0x7b - invalid */
7264
7265/* Opcode 0x0f 0x7c - invalid */
7266
7267
/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd
 *  SSE3 horizontal add of packed doubles, via the common SSE3 FP worker. */
FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
}
7274
7275
7276/* Opcode 0xf3 0x0f 0x7c - invalid */
7277
7278
/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps
 *  SSE3 horizontal add of packed singles, via the common SSE3 FP worker. */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}
7285
7286
7287/* Opcode 0x0f 0x7d - invalid */
7288
7289
/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd
 *  SSE3 horizontal subtract of packed doubles, via the common SSE3 FP worker. */
FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
}
7296
7297
7298/* Opcode 0xf3 0x0f 0x7d - invalid */
7299
7300
/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps
 *  SSE3 horizontal subtract of packed singles, via the common SSE3 FP worker. */
FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
}
7307
7308
/** Opcode 0x0f 0x7e - movd_q Ey, Pd
 *  MOVD/MOVQ store from an MMX register: REX.W=1 selects the 64-bit qword
 *  form, otherwise the low dword is stored.  Both forms switch the FPU into
 *  MMX mode.
 *  @note NOTE(review): the @opfunction tags in the doxygen test blocks below
 *        name iemOp_movd_q_Pd_Ey, which differs from this function's name -
 *        looks like a copy/paste remnant; confirm against the testcase
 *        generator before changing. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
7416
7417
/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy
 *  MOVD/MOVQ store from an XMM register: REX.W=1 selects the 64-bit qword
 *  form, otherwise the low dword is stored.  Only reads SSE state.
 *  @note NOTE(review): the @opfunction tag in the rex.w=0 doxygen block below
 *        names iemOp_movd_q_Vy_Ey, which differs from this function's name -
 *        looks like a copy/paste remnant; confirm against the testcase
 *        generator before changing. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
7520
7521/**
7522 * @opcode 0x7e
7523 * @oppfx 0xf3
7524 * @opcpuid sse2
7525 * @opgroup og_sse2_pcksclr_datamove
7526 * @opxcpttype none
7527 * @optest op1=1 op2=2 -> op1=2
7528 * @optest op1=0 op2=-42 -> op1=-42
7529 */
7530FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7531{
7532 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7534 if (IEM_IS_MODRM_REG_MODE(bRm))
7535 {
7536 /*
7537 * XMM128, XMM64.
7538 */
7539 IEM_MC_BEGIN(0, 2);
7540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7541 IEM_MC_LOCAL(uint64_t, uSrc);
7542
7543 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7544 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7545
7546 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7547 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7548
7549 IEM_MC_ADVANCE_RIP_AND_FINISH();
7550 IEM_MC_END();
7551 }
7552 else
7553 {
7554 /*
7555 * XMM128, [mem64].
7556 */
7557 IEM_MC_BEGIN(0, 2);
7558 IEM_MC_LOCAL(uint64_t, uSrc);
7559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7560
7561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7563 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7564 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7565
7566 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7567 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7568
7569 IEM_MC_ADVANCE_RIP_AND_FINISH();
7570 IEM_MC_END();
7571 }
7572}
7573
7574/* Opcode 0xf2 0x0f 0x7e - invalid */
7575
7576
/** Opcode 0x0f 0x7f - movq Qq, Pq
 *  MMX store: copies an MMX register to another MMX register or to a qword in
 *  memory; switches the FPU into MMX mode in both paths. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], MMX.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7624
/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx
 *  Store of a full XMM register; the memory path uses the alignment-checked
 *  store variant (IEM_MC_STORE_MEM_U128_ALIGN_SSE). */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7665
/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx
 *  Store of a full XMM register; unlike movdqa the memory path uses the plain
 *  (unaligned-tolerant) store. */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7706
7707/* Opcode 0xf2 0x0f 0x7f - invalid */
7708
7709
7710
/** Opcode 0x0f 0x80 - jo Jv: near jump with 16/32-bit displacement, taken
 *  when OF is set (386+). */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo  Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit signed displacement (also used in 64-bit mode). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7742
7743
/** Opcode 0x0f 0x81 - jno Jv: taken when OF is clear; note the branch arms
 *  are swapped relative to jo (jump in the ELSE path). */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit signed displacement (also used in 64-bit mode). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7775
7776
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv: taken when CF is set. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit signed displacement (also used in 64-bit mode). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7808
7809
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv: taken when CF is clear (branch arms
 *  swapped relative to jc). */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit signed displacement (also used in 64-bit mode). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7841
7842
/** Opcode 0x0f 0x84 - je/jz Jv: taken when ZF is set. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit signed displacement (also used in 64-bit mode). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7874
7875
/** Opcode 0x0f 0x85 - jne/jnz Jv: taken when ZF is clear (branch arms
 *  swapped relative to je). */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit signed displacement (also used in 64-bit mode). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7907
7908
/** Opcode 0x0f 0x86 - jbe/jna Jv: taken when CF or ZF is set. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit signed displacement (also used in 64-bit mode). */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7940
7941
/** Opcode 0x0f 0x87 - jnbe/ja Jv: near jump if above (CF=0 and ZF=0).
 * Note: the test is inverted - the fall-through is in the taken-arm of the IF. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH(); /* CF or ZF set: condition false, fall through */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm); /* both clear: take the branch */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): 32-bit signed displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7973
7974
/** Opcode 0x0f 0x88 - js Jv: near jump if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm); /* SF=1: take the branch */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): 32-bit signed displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8006
8007
/** Opcode 0x0f 0x89 - jns Jv: near jump if not sign (SF=0).
 * Note: inverted test - the fall-through is in the taken-arm of the IF. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH(); /* SF=1: condition false, fall through */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm); /* SF=0: take the branch */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): 32-bit signed displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8039
8040
/** Opcode 0x0f 0x8a - jp Jv: near jump if parity (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm); /* PF=1: take the branch */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): 32-bit signed displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8072
8073
/** Opcode 0x0f 0x8b - jnp Jv: near jump if not parity (PF=0).
 * Note: inverted test - the fall-through is in the taken-arm of the IF. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH(); /* PF=1: condition false, fall through */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm); /* PF=0: take the branch */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): 32-bit signed displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8105
8106
/** Opcode 0x0f 0x8c - jl/jnge Jv: near jump if less, signed (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm); /* SF != OF: take the branch */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): 32-bit signed displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8138
8139
/** Opcode 0x0f 0x8d - jnl/jge Jv: near jump if greater or equal, signed (SF == OF).
 * Note: inverted test - the fall-through is in the taken-arm of the IF. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH(); /* SF != OF: condition false, fall through */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm); /* SF == OF: take the branch */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): 32-bit signed displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8171
8172
/** Opcode 0x0f 0x8e - jle/jng Jv: near jump if less or equal, signed (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm); /* ZF=1 or SF != OF: take the branch */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): 32-bit signed displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8204
8205
/** Opcode 0x0f 0x8f - jnle/jg Jv: near jump if greater, signed (ZF=0 and SF == OF).
 * Note: inverted test - the fall-through is in the taken-arm of the IF. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: 16-bit signed displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH(); /* ZF=1 or SF != OF: condition false, fall through */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm); /* ZF=0 and SF == OF: take the branch */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): 32-bit signed displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8237
8238
/** Opcode 0x0f 0x90 - seto Eb: store 1 in the byte operand if overflow (OF=1), else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8278
8279
/** Opcode 0x0f 0x91 - setno Eb: store 1 in the byte operand if no overflow (OF=0), else 0.
 * Inverted sense of seto: the taken-arm of the IF stores 0. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8319
8320
/** Opcode 0x0f 0x92 - setc/setb/setnae Eb: store 1 in the byte operand if carry (CF=1), else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8360
8361
/** Opcode 0x0f 0x93 - setnc/setnb/setae Eb: store 1 in the byte operand if no carry (CF=0), else 0.
 * Inverted sense of setc: the taken-arm of the IF stores 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8401
8402
/** Opcode 0x0f 0x94 - sete/setz Eb: store 1 in the byte operand if equal/zero (ZF=1), else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8442
8443
/** Opcode 0x0f 0x95 - setne/setnz Eb: store 1 in the byte operand if not equal/not zero (ZF=0), else 0.
 * Inverted sense of sete: the taken-arm of the IF stores 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8483
8484
/** Opcode 0x0f 0x96 - setbe/setna Eb: store 1 in the byte operand if below or equal (CF=1 or ZF=1), else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8524
8525
/** Opcode 0x0f 0x97 - setnbe/seta Eb: store 1 in the byte operand if above (CF=0 and ZF=0), else 0.
 * Inverted sense of setbe: the taken-arm of the IF stores 0. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8565
8566
/** Opcode 0x0f 0x98 - sets Eb: store 1 in the byte operand if sign (SF=1), else 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8606
8607
/** Opcode 0x0f 0x99 - setns Eb: store 1 in the byte operand if not sign (SF=0), else 0.
 * Inverted sense of sets: the taken-arm of the IF stores 0. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8647
8648
/** Opcode 0x0f 0x9a - setp/setpe Eb: store 1 in the byte operand if parity (PF=1), else 0. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8688
8689
/** Opcode 0x0f 0x9b - setnp/setpo Eb: store 1 in the byte operand if no parity (PF=0), else 0.
 * Inverted sense of setp: the taken-arm of the IF stores 0. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8729
8730
/** Opcode 0x0f 0x9c - setl/setnge Eb: store 1 in the byte operand if less, signed (SF != OF), else 0. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8770
8771
/** Opcode 0x0f 0x9d - setnl/setge Eb: store 1 in the byte operand if greater or equal, signed (SF == OF), else 0.
 * Inverted sense of setl: the taken-arm of the IF stores 0. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8811
8812
/** Opcode 0x0f 0x9e - setle/setng Eb: store 1 in the byte operand if less or equal, signed (ZF=1 or SF != OF), else 0. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8852
8853
/** Opcode 0x0f 0x9f - setnle/setg Eb: store 1 in the byte operand if greater, signed (ZF=0 and SF == OF), else 0.
 * Inverted sense of setle: the taken-arm of the IF stores 0. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8893
8894
/** Opcode 0x0f 0xa0 - push fs: defers to the common segment-register push worker. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
8903
8904
/** Opcode 0x0f 0xa1 - pop fs: defers to the iemCImpl_pop_Sreg C implementation
 * with the current effective operand size. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
8913
8914
/** Opcode 0x0f 0xa2 - cpuid: defers to the iemCImpl_cpuid C implementation;
 * flagged as a potential VM-exit point. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_cpuid);
}
8923
8924
/**
 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv - the read/modify/write (non-LOCK) paths.
 *
 * Fetches ModRM itself and dispatches on the effective operand size.
 * Register destination: the bit offset is masked to the operand width
 * (0xf / 0x1f / 0x3f); the 32-bit case clears the high dword of the
 * destination register afterwards.  Memory destination: the signed bit
 * offset is split into a word-aligned byte adjustment (SAR by 4/5/6 then
 * SHL by 1/2/3) added to the effective address, plus an in-word bit index,
 * before the operand is mapped read/write.
 *
 * OF/SF/ZF/AF/PF are declared undefined for the verifier.
 *
 * Note!  This macro deliberately ends inside an open if/else so that
 *        IEMOP_BODY_BIT_Ev_Gv_LOCKED can supply the LOCK-prefixed tail.
 *
 * @param   a_fnNormalU16   Assembly worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Assembly worker for the 32-bit operand size.
 * @param   a_fnNormalU64   Assembly worker for the 64-bit operand size.
 */

#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 4); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int16_t, i16AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 4); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int32_t, i32AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 5); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int64_t, i64AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
   Supplies the LOCK-prefixed memory tail for IEMOP_BODY_BIT_Ev_Gv_RW,
   closing the if/else that macro leaves open.  Same effective-address
   adjustment scheme, but the a_fnLockedUxx workers are invoked instead. */
#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 4); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int16_t, i16AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 4); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int32_t, i32AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 4); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int64_t, i64AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
9185
/* Read-only version (bt).  Like IEMOP_BODY_BIT_Ev_Gv_RW, but the
   destination is referenced const and memory is mapped read-only, no high
   dword is cleared in the 32-bit register case (bt writes nothing), and a
   LOCK prefix raises an invalid-lock exception instead of having a locked
   variant.  This macro is self-contained (no _LOCKED tail follows). */
#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
                IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
                IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
                IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 4); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int16_t, i16AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 4); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int32_t, i32AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 4); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int64_t, i64AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
9352
9353
/** Opcode 0x0f 0xa3 - bt Ev,Gv.
 * BT only reads the destination, so the read-only body is used; it rejects
 * a LOCK prefix with an invalid-lock exception. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
}
9361
9362
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Dispatches on effective operand size; the register path fetches the Ib
 * shift count right after ModRM, while the memory path must compute the
 * effective address first because the displacement bytes precede the
 * immediate.  AF and OF are declared undefined for the verifier.
 *
 * @param   pImpl   Size-variant table supplying pfnNormalU16/U32/U64
 *                  assembly workers for the double-precision shift.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: the immediate follows ModRM directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit register writes zero the high dword. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: Ib is fetched after the addressing bytes,
           hence the extra immediate byte hint (1) to CALC_RM_EFF_ADDR. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9512
9513
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Same as iemOpCommonShldShrd_Ib except the shift count is read at runtime
 * from the low byte of rCX (CL) via IEM_MC_FETCH_GREG_U8 rather than from
 * an immediate.  AF and OF are declared undefined for the verifier.
 *
 * @param   pImpl   Size-variant table supplying pfnNormalU16/U32/U64
 *                  assembly workers for the double-precision shift.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit register writes zero the high dword. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination; no immediate follows, so size hint is 0. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9661
9662
9663
/** Opcode 0x0f 0xa4 - shld Ev,Gv,Ib.
 * Selects the CPU-appropriate EFLAGS-behaviour variant of the shld workers. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
9671
9672
/** Opcode 0x0f 0xa5 - shld Ev,Gv,CL.
 * Selects the CPU-appropriate EFLAGS-behaviour variant of the shld workers. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
9680
9681
/** Opcode 0x0f 0xa8 - push gs.
 * Defers to the common segment-register push worker with X86_SREG_GS. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
9690
9691
/** Opcode 0x0f 0xa9 - pop gs.
 * Defers to the iemCImpl_pop_Sreg C worker with the current effective
 * operand size. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
9700
9701
/** Opcode 0x0f 0xaa - rsm (resume from system-management mode).
 * Deferred entirely to iemCImpl_rsm; flagged as an indirect far branch that
 * can change CPU mode and RFLAGS, may cause a VM-exit, and always ends the
 * current translation block. */
FNIEMOP_DEF(iemOp_rsm)
{
    IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
    IEMOP_HLP_MIN_386(); /* 386SL and later. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_rsm);
}
9712
9713
9714
/** Opcode 0x0f 0xab - bts Ev,Gv.
 * The two body macros form a single if/else: _RW emits the register and
 * plain-memory paths, _LOCKED completes it with the LOCK-prefixed memory
 * path using the atomic workers. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
}
9723
9724
/** Opcode 0x0f 0xac - shrd Ev,Gv,Ib.
 * Selects the CPU-appropriate EFLAGS-behaviour variant of the shrd workers. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
9732
9733
/** Opcode 0x0f 0xad - shrd Ev,Gv,CL.
 * Selects the CPU-appropriate EFLAGS-behaviour variant of the shrd workers. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
9741
9742
/** Opcode 0x0f 0xae mem/0 - fxsave m512.
 * Gated on CPUID.FXSR; decodes the effective address and defers the actual
 * 512-byte state store to iemCImpl_fxsave. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor) /* CPUID feature gate. */
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ(); /* saving only reads the FPU state */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9761
9762
/** Opcode 0x0f 0xae mem/1 - fxrstor m512.
 * Gated on CPUID.FXSR; decodes the effective address and defers the actual
 * 512-byte state load to iemCImpl_fxrstor. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor) /* CPUID feature gate. */
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* restoring overwrites the FPU state */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9781
9782
/**
 * @opmaps grp15
 * @opcode !11/2
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest op1=0 -> mxcsr=0
 * @optest op1=0x2083 -> mxcsr=0x2083
 * @optest op1=0xfffffffe -> value.xcpt=0xd
 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse) /* CPUID feature gate. */
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /** @todo r=review: iemCImpl_ldmxcsr modifies MXCSR - confirm FOR_READ (vs FOR_CHANGE) is intentional here. */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9818
9819
/**
 * @opmaps grp15
 * @opcode !11/3
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest mxcsr=0 -> op1=0
 * @optest mxcsr=0x2083 -> op1=0x2083
 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse) /* CPUID feature gate. */
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* storing only reads MXCSR */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9854
9855
9856/**
9857 * @opmaps grp15
9858 * @opcode !11/4
9859 * @oppfx none
9860 * @opcpuid xsave
9861 * @opgroup og_system
9862 * @opxcpttype none
9863 */
9864FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9865{
9866 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9867 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9868 IEMOP_RAISE_INVALID_OPCODE_RET();
9869
9870 IEM_MC_BEGIN(3, 0);
9871 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9872 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9873 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9876 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9877 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9878 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9879 IEM_MC_END();
9880}
9881
9882
9883/**
9884 * @opmaps grp15
9885 * @opcode !11/5
9886 * @oppfx none
9887 * @opcpuid xsave
9888 * @opgroup og_system
9889 * @opxcpttype none
9890 */
9891FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9892{
9893 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9894 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9895 IEMOP_RAISE_INVALID_OPCODE_RET();
9896
9897 IEM_MC_BEGIN(3, 0);
9898 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9899 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9900 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9903 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9904 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9905 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9906 IEM_MC_END();
9907}
9908
/** Opcode 0x0f 0xae mem/6. */
/* XSAVEOPT - not implemented yet; the stub raises the default not-implemented status. */
FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9911
9912/**
9913 * @opmaps grp15
9914 * @opcode !11/7
9915 * @oppfx none
9916 * @opcpuid clfsh
9917 * @opgroup og_cachectl
9918 * @optest op1=1 ->
9919 */
9920FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9921{
9922 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9923 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9924 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9925
9926 IEM_MC_BEGIN(2, 0);
9927 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9928 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9931 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9932 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9933 IEM_MC_END();
9934}
9935
9936/**
9937 * @opmaps grp15
9938 * @opcode !11/7
9939 * @oppfx 0x66
9940 * @opcpuid clflushopt
9941 * @opgroup og_cachectl
9942 * @optest op1=1 ->
9943 */
9944FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9945{
9946 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9947 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9948 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9949
9950 IEM_MC_BEGIN(2, 0);
9951 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9952 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9955 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9956 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9957 IEM_MC_END();
9958}
9959
9960
/** Opcode 0x0f 0xae 11b/5. */
/* LFENCE: load fence; requires SSE2 on the guest (checked by the decoding helper). */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
#else
    /* On x86 hosts without SSE2 the real fence instruction is unavailable, so an
       alternative memory-fence implementation is used instead. */
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9979
9980
/** Opcode 0x0f 0xae 11b/6. */
/* MFENCE: full memory fence; requires SSE2 on the guest (checked by the decoding helper). */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
#else
    /* Fall back to an alternative fence on pre-SSE2 x86 hosts. */
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9999
10000
/** Opcode 0x0f 0xae 11b/7. */
/* SFENCE: store fence; requires SSE2 on the guest (checked by the decoding helper). */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
#else
    /* Fall back to an alternative fence on pre-SSE2 x86 hosts. */
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10019
10020
/** Opcode 0xf3 0x0f 0xae 11b/0. */
/* RDFSBASE r32/r64: read the FS segment base into a general register.
   The FSGSBASE exception helper handles the CR4.FSGSBASE/#UD conditions. */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        /* 64-bit operand size: full base address. */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: low dword of the base (store zero-extends). */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10048
10049
/** Opcode 0xf3 0x0f 0xae 11b/1. */
/* RDGSBASE r32/r64: read the GS segment base into a general register.
   Mirrors iemOp_Grp15_rdfsbase, only the segment register differs. */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        /* 64-bit operand size: full base address. */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: low dword of the base (store zero-extends). */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10077
10078
/** Opcode 0xf3 0x0f 0xae 11b/2. */
/* WRFSBASE r32/r64: write a general register into the FS segment base. */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* A 64-bit base must be canonical, otherwise #GP(0). */
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit source zero-extends into the base; a zero-extended 32-bit
           value is always canonical, so no #GP check is needed here. */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10107
10108
/** Opcode 0xf3 0x0f 0xae 11b/3. */
/* WRGSBASE r32/r64: write a general register into the GS segment base.
   Mirrors iemOp_Grp15_wrfsbase, only the segment register differs. */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* A 64-bit base must be canonical, otherwise #GP(0). */
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit source zero-extends; always canonical, so no #GP check. */
        IEM_MC_BEGIN(1, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10137
10138
10139/**
10140 * Group 15 jump table for register variant.
10141 */
10142IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10143{ /* pfx: none, 066h, 0f3h, 0f2h */
10144 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10145 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10146 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10147 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10148 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10149 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10150 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10151 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10152};
10153AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10154
10155
10156/**
10157 * Group 15 jump table for memory variant.
10158 */
10159IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10160{ /* pfx: none, 066h, 0f3h, 0f2h */
10161 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10162 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10163 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10164 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10165 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10166 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10167 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10168 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10169};
10170AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10171
10172
10173/** Opcode 0x0f 0xae. */
10174FNIEMOP_DEF(iemOp_Grp15)
10175{
10176 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10178 if (IEM_IS_MODRM_REG_MODE(bRm))
10179 /* register, register */
10180 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10181 + pVCpu->iem.s.idxPrefix], bRm);
10182 /* memory, register */
10183 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10184 + pVCpu->iem.s.idxPrefix], bRm);
10185}
10186
10187
/** Opcode 0x0f 0xaf. */
/* IMUL Gv,Ev: two-operand signed multiply; SF/ZF/AF/PF are undefined after it. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    /* EFLAGS behavior differs between CPU vendors, so pick the matching worker set. */
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
    /* Shared reg,reg/mem body; last argument 1 = source may be memory read-only. */
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
}
10197
10198
/** Opcode 0x0f 0xb0. */
/* CMPXCHG Eb,Gb: compare AL with the destination byte; if equal, store the
   source byte, else load the destination into AL.  ZF reports the outcome. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operate directly on register references. */
        IEM_MC_BEGIN(4, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG(uint32_t *,      pEFlags,                3);

        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        /* LOCK on a register destination is still dispatched to the locked worker
           (same result, the LOCK #UD check happened during decoding). */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map the byte read/write, run the worker on a local
           AL copy, then commit memory, EFLAGS and AL in that order. */
        IEM_MC_BEGIN(4, 4);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,       u8Al);
        IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* AL can be stored unconditionally: a byte store never touches the
           upper register bits, and on success the worker left u8Al unchanged. */
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10257
/** Opcode 0x0f 0xb1. */
/* CMPXCHG Ev,Gv: compare rAX with the destination; if equal, store the source
   operand, else load the destination into rAX.  ZF reports the outcome.
   The 32-bit forms need care: only the register actually written may have its
   upper 32 bits cleared (bugref:10369). */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* Only the register that was actually written gets its upper
                   32 bits cleared: the destination on success (ZF=1), EAX on
                   failure (ZF=0). */
                IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                } IEM_MC_ELSE() {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                } IEM_MC_ENDIF();

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                /* 32-bit hosts pass the 64-bit source by reference. */
                IEM_MC_ARG(uint64_t *,      pu64Src,                2);
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map read/write, run the worker on a local rAX
           copy, then commit memory, EFLAGS and rAX. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 4);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t,      u16Ax);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* A 16-bit store leaves the upper bits alone, so AX can be
                   written back unconditionally (unchanged on success). */
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 4);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t,      u32Eax);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);

                /* Write EAX back (with implicit zero-extension) only when the
                   compare failed (ZF=0); on success rAX must stay untouched so
                   its upper 32 bits are not cleared (bugref:10369). */
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                    IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                } IEM_MC_ENDIF();

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 4);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                /* 32-bit hosts pass the 64-bit source by reference. */
                IEM_MC_ARG(uint64_t *,      pu64Src,                2);
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t,      u64Rax);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* 64-bit store can be unconditional: on success u64Rax still
                   holds the original RAX value. */
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10464
10465
10466/** Opcode 0x0f 0xb2. */
10467FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10468{
10469 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10470 IEMOP_HLP_MIN_386();
10471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10472 if (IEM_IS_MODRM_REG_MODE(bRm))
10473 IEMOP_RAISE_INVALID_OPCODE_RET();
10474 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10475}
10476
10477
/** Opcode 0x0f 0xb3. */
/* BTR Ev,Gv: test a bit and reset it; CF receives the original bit value.
   The shared body macros handle both the plain and the LOCK-prefixed forms. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv_RW(    iemAImpl_btr_u16,        iemAImpl_btr_u32,        iemAImpl_btr_u64);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
}
10486
10487
10488/** Opcode 0x0f 0xb4. */
10489FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10490{
10491 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10492 IEMOP_HLP_MIN_386();
10493 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10494 if (IEM_IS_MODRM_REG_MODE(bRm))
10495 IEMOP_RAISE_INVALID_OPCODE_RET();
10496 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10497}
10498
10499
10500/** Opcode 0x0f 0xb5. */
10501FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10502{
10503 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10504 IEMOP_HLP_MIN_386();
10505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10506 if (IEM_IS_MODRM_REG_MODE(bRm))
10507 IEMOP_RAISE_INVALID_OPCODE_RET();
10508 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10509}
10510
10511
/** Opcode 0x0f 0xb6. */
/* MOVZX Gv,Eb: zero-extend a byte source into a 16/32/64-bit destination register. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10607
10608
/** Opcode 0x0f 0xb7. */
/* MOVZX Gv,Ew: zero-extend a word source into a 32/64-bit destination register.
   With a 16-bit effective operand size the result is still handled via the
   32-bit path (the store zero-extends). */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
10677
10678
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
/* Itanium-only opcode; always raises #UD on x86 CPUs. */
FNIEMOP_UD_STUB(iemOp_jmpe);
10681
10682
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
/* POPCNT Gv,Ev: count set bits in the source; #UD without guest POPCNT support.
   Uses the host instruction when available, otherwise the C fallback. */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    static const IEMOPBINSIZES s_Native =
    {   NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
# endif
    static const IEMOPBINSIZES s_Fallback =
    {   NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
#endif
    const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
    /* Shared reg,reg/mem body; only the "normal" (unlocked) workers exist. */
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
}
10700
10701
10702/**
10703 * @opcode 0xb9
10704 * @opinvalid intel-modrm
10705 * @optest ->
10706 */
10707FNIEMOP_DEF(iemOp_Grp10)
10708{
10709 /*
10710 * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
10711 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10712 */
10713 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10714 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10715 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10716}
10717
10718
10719/**
10720 * Body for group 8 bit instruction.
10721 */
10722#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10723 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10724 \
10725 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10726 { \
10727 /* register destination. */ \
10728 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10729 \
10730 switch (pVCpu->iem.s.enmEffOpSize) \
10731 { \
10732 case IEMMODE_16BIT: \
10733 IEM_MC_BEGIN(3, 0); \
10734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10735 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10736 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10737 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10738 \
10739 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10740 IEM_MC_REF_EFLAGS(pEFlags); \
10741 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10742 \
10743 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10744 IEM_MC_END(); \
10745 break; \
10746 \
10747 case IEMMODE_32BIT: \
10748 IEM_MC_BEGIN(3, 0); \
10749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10750 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10751 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10752 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10753 \
10754 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10755 IEM_MC_REF_EFLAGS(pEFlags); \
10756 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10757 \
10758 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
10759 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10760 IEM_MC_END(); \
10761 break; \
10762 \
10763 case IEMMODE_64BIT: \
10764 IEM_MC_BEGIN(3, 0); \
10765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10766 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10767 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10768 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10769 \
10770 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10771 IEM_MC_REF_EFLAGS(pEFlags); \
10772 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10773 \
10774 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10775 IEM_MC_END(); \
10776 break; \
10777 \
10778 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10779 } \
10780 } \
10781 else \
10782 { \
10783 /* memory destination. */ \
10784 /** @todo test negative bit offsets! */ \
10785 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10786 { \
10787 switch (pVCpu->iem.s.enmEffOpSize) \
10788 { \
10789 case IEMMODE_16BIT: \
10790 IEM_MC_BEGIN(3, 3); \
10791 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10792 IEM_MC_ARG(uint16_t, u16Src, 1); \
10793 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10795 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10796 \
10797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10798 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10799 IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
10800 IEMOP_HLP_DONE_DECODING(); \
10801 IEM_MC_FETCH_EFLAGS(EFlags); \
10802 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10803 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10804 \
10805 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
10806 IEM_MC_COMMIT_EFLAGS(EFlags); \
10807 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10808 IEM_MC_END(); \
10809 break; \
10810 \
10811 case IEMMODE_32BIT: \
10812 IEM_MC_BEGIN(3, 3); \
10813 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10814 IEM_MC_ARG(uint32_t, u32Src, 1); \
10815 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10817 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10818 \
10819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10820 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10821 IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
10822 IEMOP_HLP_DONE_DECODING(); \
10823 IEM_MC_FETCH_EFLAGS(EFlags); \
10824 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10825 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10826 \
10827 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
10828 IEM_MC_COMMIT_EFLAGS(EFlags); \
10829 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10830 IEM_MC_END(); \
10831 break; \
10832 \
10833 case IEMMODE_64BIT: \
10834 IEM_MC_BEGIN(3, 3); \
10835 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10836 IEM_MC_ARG(uint64_t, u64Src, 1); \
10837 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10838 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10839 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10840 \
10841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10842 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10843 IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
10844 IEMOP_HLP_DONE_DECODING(); \
10845 IEM_MC_FETCH_EFLAGS(EFlags); \
10846 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10847 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10848 \
10849 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
10850 IEM_MC_COMMIT_EFLAGS(EFlags); \
10851 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10852 IEM_MC_END(); \
10853 break; \
10854 \
10855 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10856 } \
10857 } \
10858 else \
10859 { \
10860 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * LOCK-prefixed memory-destination continuation of IEMOP_BODY_BIT_Ev_Ib_RW.
 * Closes the scopes that macro left open, so the two must always be used
 * together and in this order.
 */
#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
10936
/* Read-only version (bt) */
/**
 * Body for the read-only group 8 instruction (BT Ev,Ib).
 *
 * BT never writes its destination, so the register paths take const
 * references and - deliberately - no high-dword clearing is done for the
 * 32-bit operand size; the memory operand is mapped read-only.  A LOCK
 * prefix raises \#UD since there is nothing to write.
 */
#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
        \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* BT cannot be locked - nothing is written. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
11080
11081
/** Opcode 0x0f 0xba /4.  BT Ev,Ib - bit test with an immediate bit offset;
 *  read-only destination, only CF is defined afterwards. */
FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
{
    IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
}
11088
11089
/** Opcode 0x0f 0xba /5.  BTS Ev,Ib - bit test and set.  The two body macros
 *  together supply the unlocked and LOCK-prefixed paths. */
FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
{
    IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
}
11097
11098
/** Opcode 0x0f 0xba /6.  BTR Ev,Ib - bit test and reset.  The two body
 *  macros together supply the unlocked and LOCK-prefixed paths. */
FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
{
    IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
}
11106
11107
/** Opcode 0x0f 0xba /7.  BTC Ev,Ib - bit test and complement.  The two body
 *  macros together supply the unlocked and LOCK-prefixed paths. */
FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
{
    IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
}
11115
11116
/** Opcode 0x0f 0xba.  Group 8 dispatcher: routes on the modr/m reg field to
 *  BT/BTS/BTR/BTC Ev,Ib; /0../3 are undefined encodings. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);

        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11136
11137
11138/** Opcode 0x0f 0xbb. */
11139FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11140{
11141 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11142 IEMOP_HLP_MIN_386();
11143 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11144 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11145}
11146
11147
11148/**
11149 * Common worker for BSF and BSR instructions.
11150 *
11151 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11152 * the destination register, which means that for 32-bit operations the high
11153 * bits must be left alone.
11154 *
11155 * @param pImpl Pointer to the instruction implementation (assembly).
11156 */
11157FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
11158{
11159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11160
11161 /*
11162 * If rm is denoting a register, no more instruction bytes.
11163 */
11164 if (IEM_IS_MODRM_REG_MODE(bRm))
11165 {
11166 switch (pVCpu->iem.s.enmEffOpSize)
11167 {
11168 case IEMMODE_16BIT:
11169 IEM_MC_BEGIN(3, 0);
11170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11171 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11172 IEM_MC_ARG(uint16_t, u16Src, 1);
11173 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11174
11175 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11176 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11177 IEM_MC_REF_EFLAGS(pEFlags);
11178 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11179
11180 IEM_MC_ADVANCE_RIP_AND_FINISH();
11181 IEM_MC_END();
11182 break;
11183
11184 case IEMMODE_32BIT:
11185 IEM_MC_BEGIN(3, 0);
11186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11187 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11188 IEM_MC_ARG(uint32_t, u32Src, 1);
11189 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11190
11191 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11192 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11193 IEM_MC_REF_EFLAGS(pEFlags);
11194 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11195 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11196 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11197 } IEM_MC_ENDIF();
11198 IEM_MC_ADVANCE_RIP_AND_FINISH();
11199 IEM_MC_END();
11200 break;
11201
11202 case IEMMODE_64BIT:
11203 IEM_MC_BEGIN(3, 0);
11204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11205 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11206 IEM_MC_ARG(uint64_t, u64Src, 1);
11207 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11208
11209 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11210 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11211 IEM_MC_REF_EFLAGS(pEFlags);
11212 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11213
11214 IEM_MC_ADVANCE_RIP_AND_FINISH();
11215 IEM_MC_END();
11216 break;
11217
11218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11219 }
11220 }
11221 else
11222 {
11223 /*
11224 * We're accessing memory.
11225 */
11226 switch (pVCpu->iem.s.enmEffOpSize)
11227 {
11228 case IEMMODE_16BIT:
11229 IEM_MC_BEGIN(3, 1);
11230 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11231 IEM_MC_ARG(uint16_t, u16Src, 1);
11232 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11233 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11234
11235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11237 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11238 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11239 IEM_MC_REF_EFLAGS(pEFlags);
11240 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11241
11242 IEM_MC_ADVANCE_RIP_AND_FINISH();
11243 IEM_MC_END();
11244 break;
11245
11246 case IEMMODE_32BIT:
11247 IEM_MC_BEGIN(3, 1);
11248 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11249 IEM_MC_ARG(uint32_t, u32Src, 1);
11250 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11252
11253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11255 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11256 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11257 IEM_MC_REF_EFLAGS(pEFlags);
11258 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11259
11260 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11261 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11262 } IEM_MC_ENDIF();
11263 IEM_MC_ADVANCE_RIP_AND_FINISH();
11264 IEM_MC_END();
11265 break;
11266
11267 case IEMMODE_64BIT:
11268 IEM_MC_BEGIN(3, 1);
11269 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11270 IEM_MC_ARG(uint64_t, u64Src, 1);
11271 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11273
11274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11276 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11277 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11278 IEM_MC_REF_EFLAGS(pEFlags);
11279 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11280
11281 IEM_MC_ADVANCE_RIP_AND_FINISH();
11282 IEM_MC_END();
11283 break;
11284
11285 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11286 }
11287 }
11288}
11289
11290
/** Opcode 0x0f 0xbc.  BSF Gv,Ev - bit scan forward; implementation selected
 *  per target-CPU EFLAGS behavior since the undefined flags differ by vendor. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
}
11299
11300
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev.
 *  Decodes as BSF when the guest lacks BMI1 (the F3 prefix is then ignored). */
FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
    IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Implementation variants for the vendor-specific EFLAGS behaviors. */
    static const IEMOPBINSIZES s_iemAImpl_tzcnt =
    {   NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
    {   NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
    {   NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
    /* First index: host BMI1 availability (see selector call below); second: target CPU EFLAGS behavior. */
    static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
        { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
                                                                            IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
}
11326
11327
/** Opcode 0x0f 0xbd.  BSR Gv,Ev - bit scan reverse; implementation selected
 *  per target-CPU EFLAGS behavior since the undefined flags differ by vendor. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
}
11336
11337
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev.
 *  Decodes as BSR when the guest lacks BMI1 (the F3 prefix is then ignored). */
FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
    IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Implementation variants for the vendor-specific EFLAGS behaviors. */
    static const IEMOPBINSIZES s_iemAImpl_lzcnt =
    {   NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
    {   NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
    {   NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
    /* First index: host BMI1 availability (see selector call below); second: target CPU EFLAGS behavior. */
    static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
        { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
                                                                            IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
}
11363
11364
11365
/** Opcode 0x0f 0xbe.  MOVSX Gv,Eb - sign-extend a byte register/memory
 *  operand into a 16/32/64-bit destination register. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11461
11462
/** Opcode 0x0f 0xbf.  MOVSX Gv,Ew - sign-extend a word register/memory
 *  operand; only the 32-bit and 64-bit destination sizes are distinguished. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
11531
11532
11533/** Opcode 0x0f 0xc0. */
11534FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11535{
11536 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11537 IEMOP_HLP_MIN_486();
11538 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11539
11540 /*
11541 * If rm is denoting a register, no more instruction bytes.
11542 */
11543 if (IEM_IS_MODRM_REG_MODE(bRm))
11544 {
11545 IEM_MC_BEGIN(3, 0);
11546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11547 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11548 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11549 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11550
11551 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11552 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11553 IEM_MC_REF_EFLAGS(pEFlags);
11554 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11555
11556 IEM_MC_ADVANCE_RIP_AND_FINISH();
11557 IEM_MC_END();
11558 }
11559 else
11560 {
11561 /*
11562 * We're accessing memory.
11563 */
11564 IEM_MC_BEGIN(3, 3);
11565 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11566 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11567 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11568 IEM_MC_LOCAL(uint8_t, u8RegCopy);
11569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11570
11571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11572 IEMOP_HLP_DONE_DECODING();
11573 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11574 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11575 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
11576 IEM_MC_FETCH_EFLAGS(EFlags);
11577 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11578 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11579 else
11580 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
11581
11582 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
11583 IEM_MC_COMMIT_EFLAGS(EFlags);
11584 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
11585 IEM_MC_ADVANCE_RIP_AND_FINISH();
11586 IEM_MC_END();
11587 }
11588}
11589
11590
/**
 * @opcode 0x0f 0xc1 - xadd Ev,Gv.
 *
 * Exchange-and-add: temp = Ev; Ev += Gv; Gv = temp (old destination value).
 * Handled per effective operand size (16/32/64-bit).  The memory form honours
 * the LOCK prefix via the *_locked assembly workers; the register form rejects
 * LOCK during decoding.
 */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486(); /* XADD first appeared on the 80486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid on the register form. */
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                /* The worker updates both operands thru the references. */
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* The worker writes via 32-bit references, so explicitly clear
                   the high halves of both 64-bit registers afterwards. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  The destination is mapped read-write and a
         * local copy of the source register is taken so its old value can be
         * written back after the (possibly locked) add-and-exchange.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* The worker left the old memory value in the local copy; store it in Gv. */
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* 32-bit GREG store; this also zeroes the high half in 64-bit mode. */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11747
11748
/**
 * @opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib.
 *
 * Compares packed single-precision values per the imm8 predicate; the worker
 * (iemAImpl_cmpps_u128) writes the result into a local X86XMMREG which is only
 * committed to the destination register when no MXCSR exception is pending.
 */
FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(4, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The imm8 follows the ModR/M + SIB + displacement bytes, so the
           effective address must be calculated before fetching it. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11818
11819
/**
 * @opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib.
 *
 * SSE2 packed double-precision compare per imm8 predicate.  Same structure as
 * cmpps above: result goes to a local first and is only stored when no MXCSR
 * exception is pending.
 */
FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(4, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first, imm8 second - it trails the addressing bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11889
11890
11891/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11892FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11893{
11894 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11895
11896 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11897 if (IEM_IS_MODRM_REG_MODE(bRm))
11898 {
11899 /*
11900 * XMM32, XMM32.
11901 */
11902 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11903 IEM_MC_BEGIN(4, 2);
11904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11905 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11906 IEM_MC_LOCAL(X86XMMREG, Dst);
11907 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11908 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11909 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11910 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11911 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11912 IEM_MC_PREPARE_SSE_USAGE();
11913 IEM_MC_REF_MXCSR(pfMxcsr);
11914 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11915 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11916 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11917 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11918 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11919 } IEM_MC_ELSE() {
11920 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11921 } IEM_MC_ENDIF();
11922
11923 IEM_MC_ADVANCE_RIP_AND_FINISH();
11924 IEM_MC_END();
11925 }
11926 else
11927 {
11928 /*
11929 * XMM32, [mem32].
11930 */
11931 IEM_MC_BEGIN(4, 3);
11932 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11933 IEM_MC_LOCAL(X86XMMREG, Dst);
11934 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11935 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11936 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11938
11939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11940 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11941 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11943 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11944 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11945
11946 IEM_MC_PREPARE_SSE_USAGE();
11947 IEM_MC_REF_MXCSR(pfMxcsr);
11948 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11949 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11950 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11951 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11952 } IEM_MC_ELSE() {
11953 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11954 } IEM_MC_ENDIF();
11955
11956 IEM_MC_ADVANCE_RIP_AND_FINISH();
11957 IEM_MC_END();
11958 }
11959}
11960
11961
/**
 * @opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib.
 *
 * SSE2 scalar double-precision compare per imm8 predicate; only the low qword
 * of the destination is written, and only when no MXCSR exception is pending.
 */
FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(4, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            /* Scalar result: only qword 0 of the destination register is updated. */
            IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM64, [mem64].
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the addressing bytes, so calculate the address first. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12031
12032
/**
 * @opcode 0x0f 0xc3 - movnti My,Gy.
 *
 * Non-temporal store of a 32/64-bit general register to memory.  The
 * non-temporal hint is not modelled here; a plain store is performed.
 * Only the register -> memory form is valid; everything else raises \#UD.
 */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* MOVNTI requires SSE2. */

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                IEMOP_RAISE_INVALID_OPCODE_RET();

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12083
12084
12085/* Opcode 0x66 0x0f 0xc3 - invalid */
12086/* Opcode 0xf3 0x0f 0xc3 - invalid */
12087/* Opcode 0xf2 0x0f 0xc3 - invalid */
12088
12089
/**
 * @opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib (MMX form).
 *
 * Inserts a 16-bit value (from a general register or memory) into the MMX
 * destination at the word position selected by imm8.  Available with SSE or
 * the AMD MMX extensions.
 */
FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pu64Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* Accessing MMX registers switches the FPU to MMX mode. */
        IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst); /* Mark the MMX register dirty (sets the FTW tag). */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint64_t *, pu64Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the addressing bytes, so calculate the address first. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12142
12143
/**
 * @opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib (SSE2 form).
 *
 * Inserts a 16-bit value (from a general register or memory) into the XMM
 * destination at the word position selected by imm8.
 */
FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the addressing bytes, so calculate the address first. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12192
12193
12194/* Opcode 0xf3 0x0f 0xc4 - invalid */
12195/* Opcode 0xf2 0x0f 0xc4 - invalid */
12196
12197
/**
 * @opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib (MMX form).
 *
 * Extracts the word selected by imm8 from an MMX register into a 32-bit
 * general register (stored via a 32-bit GREG write, so the word is
 * zero-extended).  Only the register form exists; a memory operand
 * raises \#UD.
 */
FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
{
    /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, MMX, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_LOCAL(uint16_t, u16Dst);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
        IEM_MC_ARG(uint64_t, u64Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* Accessing MMX registers switches the FPU to MMX mode. */
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12228
12229
/**
 * @opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib (SSE2 form).
 *
 * Extracts the word selected by imm8 from an XMM register into a 32-bit
 * general register (zero-extended via the 32-bit GREG store).  Only the
 * register form exists; a memory operand raises \#UD.
 */
FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
{
    IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint16_t, u16Dst);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12259
12260
12261/* Opcode 0xf3 0x0f 0xc5 - invalid */
12262/* Opcode 0xf2 0x0f 0xc5 - invalid */
12263
12264
/**
 * @opcode 0x0f 0xc6 - shufps Vps, Wps, Ib.
 *
 * Shuffles packed single-precision values per the imm8 selector.  The worker
 * modifies the destination register in place through the reference.
 */
FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the addressing bytes, so calculate the address first. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12315
12316
/**
 * @opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib.
 *
 * SSE2 shuffle of packed double-precision values per the imm8 selector.
 * Same structure as shufps above; the destination is modified in place.
 */
FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* imm8 trails the addressing bytes, so calculate the address first. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12367
12368
12369/* Opcode 0xf3 0x0f 0xc6 - invalid */
12370/* Opcode 0xf2 0x0f 0xc6 - invalid */
12371
12372
/**
 * @opcode 0x0f 0xc7 !11/1 - cmpxchg8b Mq.
 *
 * Compares EDX:EAX with the 64-bit memory operand; on match stores ECX:EBX
 * into memory and sets ZF, otherwise loads the memory value into EDX:EAX.
 * The memory operand is mapped read-write; honours the LOCK prefix unless
 * the VM is configured to disregard it.
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Gather the EDX:EAX comparand into a 64-bit local (32-bit halves). */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Likewise the ECX:EBX replacement value. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (   !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
        && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* Comparison failed (ZF=0): the worker left the old memory value in the
       local; load it into EDX:EAX (32-bit stores zero the high halves). */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12416
12417
/**
 * @opcode REX.W 0x0f 0xc7 !11/1 - cmpxchg16b Mdq.
 *
 * 128-bit compare-and-exchange of RDX:RAX against the memory operand, storing
 * RCX:RBX on match.  Requires the CX16 CPUID feature (\#UD otherwise) and a
 * 16-byte aligned operand (\#GP(0) otherwise).  Hosts without a native 16-byte
 * CAS fall back to a non-atomic helper, or to an EMT rendezvous on SMP guests.
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); /* Architectural 16-byte alignment requirement. */
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* Gather the RDX:RAX comparand. */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        /* Likewise the RCX:RBX replacement value. */
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);

#ifdef RT_ARCH_AMD64 /* some code duplication here because IEMAllInstPython.py cannot parse if/else/#if spaghetti. */
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
        {
            if (   !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
                && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
        { /* (see comments in #else case below) */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
                                    iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }

#elif defined(RT_ARCH_ARM64)
        /** @todo may require fallback for unaligned accesses... */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);

#else
        /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
                 accesses and not at all atomic, which works fine in a UNI CPU guest
                 configuration (ignoring DMA). If guest SMP is active we have no choice
                 but to use a rendezvous callback here. Sigh. */
        if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        else
        {
            IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
        }
#endif

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* Comparison failed (ZF=0): load the old memory value into RDX:RAX. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();

        IEM_MC_END();
    }
    Log(("cmpxchg16b -> #UD\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
12501
12502FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12503{
12504 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12505 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12506 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12507}
12508
12509
/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
{
    /* RDRAND decodes to #UD when the guest CPU profile lacks the RdRand feature. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. One case per effective operand size; each calls
           the host RDRAND helper when available, else the software fallback. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback),
                                         pu16Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback),
                                         pu32Dst, pEFlags);

                /* Writing through the 32-bit reference does not clear the upper
                   half of the 64-bit register, so do it explicitly here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback),
                                         pu64Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    /* Register only. The memory encoded form (!11) is invalid and raises #UD. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12574
/** Opcode 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    /* Decodes VMPTRLD and defers the actual work to iemCImpl_vmptrld,
       passing the effective segment and address of the memory operand. */
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
#else
/* Without nested VMX support the instruction simply raises #UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
#endif
12594
/** Opcode 0x66 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    /* Decodes VMCLEAR and defers the actual work to iemCImpl_vmclear,
       passing the effective segment and address of the memory operand. */
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
#else
/* Without nested VMX support the instruction simply raises #UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
#endif
12614
/** Opcode 0xf3 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    /* Decodes VMXON and defers the actual work to iemCImpl_vmxon,
       passing the effective segment and address of the memory operand. */
    IEMOP_MNEMONIC(vmxon, "vmxon");
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
#else
/* Without nested VMX support the instruction simply raises #UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
#endif
12633
/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    /* Decodes VMPTRST and defers the actual work to iemCImpl_vmptrst,
       passing the effective segment and address of the memory operand. */
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
#else
/* Without nested VMX support the instruction simply raises #UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
#endif
12653
/** Opcode 0x0f 0xc7 11/7. */
FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
{
    /* RDSEED decodes to #UD when the guest CPU profile lacks the RdSeed feature. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. One case per effective operand size; each calls
           the host RDSEED helper when available, else the software fallback. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback),
                                         pu16Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback),
                                         pu32Dst, pEFlags);

                /* Writing through the 32-bit reference does not clear the upper
                   half of the 64-bit register, so do it explicitly here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback),
                                         pu64Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    /* Register only. The memory encoded form (!11) is invalid and raises #UD. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12718
12719/**
12720 * Group 9 jump table for register variant.
12721 */
12722IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12723{ /* pfx: none, 066h, 0f3h, 0f2h */
12724 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12725 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12726 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12727 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12728 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12729 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12730 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12731 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12732};
12733AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12734
12735
12736/**
12737 * Group 9 jump table for memory variant.
12738 */
12739IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12740{ /* pfx: none, 066h, 0f3h, 0f2h */
12741 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12742 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12743 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12744 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12745 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12746 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12747 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12748 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12749};
12750AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12751
12752
12753/** Opcode 0x0f 0xc7. */
12754FNIEMOP_DEF(iemOp_Grp9)
12755{
12756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12757 if (IEM_IS_MODRM_REG_MODE(bRm))
12758 /* register, register */
12759 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12760 + pVCpu->iem.s.idxPrefix], bRm);
12761 /* memory, register */
12762 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12763 + pVCpu->iem.s.idxPrefix], bRm);
12764}
12765
12766
12767/**
12768 * Common 'bswap register' helper.
12769 */
12770FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12771{
12772 switch (pVCpu->iem.s.enmEffOpSize)
12773 {
12774 case IEMMODE_16BIT:
12775 IEM_MC_BEGIN(1, 0);
12776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12777 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12778 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12779 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12780 IEM_MC_ADVANCE_RIP_AND_FINISH();
12781 IEM_MC_END();
12782 break;
12783
12784 case IEMMODE_32BIT:
12785 IEM_MC_BEGIN(1, 0);
12786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12787 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12788 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12789 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12790 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12791 IEM_MC_ADVANCE_RIP_AND_FINISH();
12792 IEM_MC_END();
12793 break;
12794
12795 case IEMMODE_64BIT:
12796 IEM_MC_BEGIN(1, 0);
12797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12798 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12799 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12800 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12801 IEM_MC_ADVANCE_RIP_AND_FINISH();
12802 IEM_MC_END();
12803 break;
12804
12805 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12806 }
12807}
12808
12809
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    /* REX.B extends the encoded register to r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
12820
12821
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    /* REX.B extends the encoded register to r9. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
12829
12830
12831/** Opcode 0x0f 0xca. */
12832FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12833{
12834 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r10");
12835 IEMOP_HLP_MIN_486();
12836 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12837}
12838
12839
12840/** Opcode 0x0f 0xcb. */
12841FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12842{
12843 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r11");
12844 IEMOP_HLP_MIN_486();
12845 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12846}
12847
12848
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    /* REX.B extends the encoded register to r12. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
12856
12857
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    /* REX.B extends the encoded register to r13. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
12865
12866
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    /* REX.B extends the encoded register to r14. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
12874
12875
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    /* REX.B extends the encoded register to r15. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
12882}
12883
12884
12885/* Opcode 0x0f 0xd0 - invalid */
12886
12887
/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
{
    /* Decode only; the common SSE3 FP full,full worker does the rest. */
    IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
}
12894
12895
12896/* Opcode 0xf3 0x0f 0xd0 - invalid */
12897
12898
/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
{
    /* Decode only; the common SSE3 FP full,full worker does the rest. */
    IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
}
12905
12906
12907
/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
{
    /* Decode only; dispatches to the common MMX optimized full,full worker. */
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
}
12914
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 optimized full,full worker. */
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
}
12921
12922/* Opcode 0xf3 0x0f 0xd1 - invalid */
12923/* Opcode 0xf2 0x0f 0xd1 - invalid */
12924
/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
{
    /* Decode only; dispatches to the common MMX optimized full,full worker. */
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
}
12931
12932
/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 optimized full,full worker. */
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
}
12939
12940
12941/* Opcode 0xf3 0x0f 0xd2 - invalid */
12942/* Opcode 0xf2 0x0f 0xd2 - invalid */
12943
12944/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12945FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12946{
12947 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12948 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12949}
12950
12951
/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 optimized full,full worker. */
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
}
12958
12959
12960/* Opcode 0xf3 0x0f 0xd3 - invalid */
12961/* Opcode 0xf2 0x0f 0xd3 - invalid */
12962
12963
/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
{
    /* Decode only; note the _Sse2 worker variant is used for this MMX form
       (presumably gating on SSE2 availability - see the worker itself). */
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
}
12970
12971
/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 full,full worker. */
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
}
12978
12979
12980/* Opcode 0xf3 0x0f 0xd4 - invalid */
12981/* Opcode 0xf2 0x0f 0xd4 - invalid */
12982
/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
{
    /* Decode only; dispatches to the common MMX full,full worker. */
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
}
12989
/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 full,full worker. */
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
}
12996
12997
12998/* Opcode 0xf3 0x0f 0xd5 - invalid */
12999/* Opcode 0xf2 0x0f 0xd5 - invalid */
13000
13001/* Opcode 0x0f 0xd6 - invalid */
13002
13003/**
13004 * @opcode 0xd6
13005 * @oppfx 0x66
13006 * @opcpuid sse2
13007 * @opgroup og_sse2_pcksclr_datamove
13008 * @opxcpttype none
13009 * @optest op1=-1 op2=2 -> op1=2
13010 * @optest op1=0 op2=-42 -> op1=-42
13011 */
FNIEMOP_DEF(iemOp_movq_Wq_Vq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         * Copies the low qword of the source XMM register into the
         * destination XMM register, zero extending it to 128 bits.
         */
        IEM_MC_BEGIN(0, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         * Stores the low qword of the source XMM register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
13055
13056
13057/**
13058 * @opcode 0xd6
13059 * @opcodesub 11 mr/reg
13060 * @oppfx f3
13061 * @opcpuid sse2
13062 * @opgroup og_sse2_simdint_datamove
13063 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13064 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13065 */
FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         * Copies the MMX register into the low qword of the XMM register,
         * zero extending to 128 bits; switches the FPU to MMX mode.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udf30fd6mem
     * @opcode      0xd6
     * @opcodesub   !11 mr/reg
     * @oppfx       f3
     * @opunused    intel-modrm
     * @opcpuid     sse
     * @optest      ->
     */
    else
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}
13103
13104
13105/**
13106 * @opcode 0xd6
13107 * @opcodesub 11 mr/reg
13108 * @oppfx f2
13109 * @opcpuid sse2
13110 * @opgroup og_sse2_simdint_datamove
13111 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13112 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13113 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13114 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13115 * @optest op1=-42 op2=0xfedcba9876543210
13116 * -> op1=0xfedcba9876543210 ftw=0xff
13117 */
FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         * Copies the low qword of the XMM register into the MMX register;
         * switches the FPU to MMX mode.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udf20fd6mem
     * @opcode      0xd6
     * @opcodesub   !11 mr/reg
     * @oppfx       f2
     * @opunused    intel-modrm
     * @opcpuid     sse
     * @optest      ->
     */
    else
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}
13155
13156
13157/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        /* Destination is referenced as a full 64-bit GREG (see note above),
           source is the MMX register; the assembly worker does the rest. */
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* The memory encoded form raises #UD. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
13184
13185
13186/** Opcode 0x66 0x0f 0xd7 - */
/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Destination is referenced as a full 64-bit GREG (see note above),
           source is the XMM register; the assembly worker does the rest. */
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* The memory encoded form raises #UD. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
13210
13211
13212/* Opcode 0xf3 0x0f 0xd7 - invalid */
13213/* Opcode 0xf2 0x0f 0xd7 - invalid */
13214
13215
/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
{
    /* Decode only; dispatches to the common MMX full,full worker. */
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
}
13222
13223
/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 full,full worker. */
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
}
13230
13231
13232/* Opcode 0xf3 0x0f 0xd8 - invalid */
13233/* Opcode 0xf2 0x0f 0xd8 - invalid */
13234
/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
{
    /* Decode only; dispatches to the common MMX full,full worker. */
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
}
13241
13242
/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 full,full worker. */
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
}
13249
13250
13251/* Opcode 0xf3 0x0f 0xd9 - invalid */
13252/* Opcode 0xf2 0x0f 0xd9 - invalid */
13253
/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
{
    /* Decode only; uses the MMX+SSE worker (SSE-era MMX extension). */
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
}
13260
13261
/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 full,full worker. */
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
}
13268
13269/* Opcode 0xf3 0x0f 0xda - invalid */
13270/* Opcode 0xf2 0x0f 0xda - invalid */
13271
/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    /* Decode only; dispatches to the common MMX full,full worker. */
    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
}
13278
13279
/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
FNIEMOP_DEF(iemOp_pand_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 full,full worker. */
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}
13286
13287
13288/* Opcode 0xf3 0x0f 0xdb - invalid */
13289/* Opcode 0xf2 0x0f 0xdb - invalid */
13290
/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
{
    /* Decode only; dispatches to the common MMX full,full worker. */
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
}
13297
13298
/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 full,full worker. */
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
}
13305
13306
13307/* Opcode 0xf3 0x0f 0xdc - invalid */
13308/* Opcode 0xf2 0x0f 0xdc - invalid */
13309
/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
{
    /* Decode only; dispatches to the common MMX full,full worker. */
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
}
13316
13317
/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 full,full worker. */
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
}
13324
13325
13326/* Opcode 0xf3 0x0f 0xdd - invalid */
13327/* Opcode 0xf2 0x0f 0xdd - invalid */
13328
/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
{
    /* Decode only; uses the MMX+SSE worker (SSE-era MMX extension). */
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
}
13335
13336
/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 full,full worker. */
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
}
13343
13344/* Opcode 0xf3 0x0f 0xde - invalid */
13345/* Opcode 0xf2 0x0f 0xde - invalid */
13346
13347
/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
{
    /* Decode only; dispatches to the common MMX full,full worker. */
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
}
13354
13355
/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 full,full worker. */
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}
13362
13363
13364/* Opcode 0xf3 0x0f 0xdf - invalid */
13365/* Opcode 0xf2 0x0f 0xdf - invalid */
13366
/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
{
    /* Decode only; uses the optimized MMX+SSE worker (SSE-era MMX extension). */
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
}
13373
13374
/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 optimized full,full worker. */
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
}
13381
13382
13383/* Opcode 0xf3 0x0f 0xe0 - invalid */
13384/* Opcode 0xf2 0x0f 0xe0 - invalid */
13385
/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
{
    /* Decode only; dispatches to the common MMX optimized full,full worker. */
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
}
13392
13393
/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 optimized full,full worker. */
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
}
13400
13401
13402/* Opcode 0xf3 0x0f 0xe1 - invalid */
13403/* Opcode 0xf2 0x0f 0xe1 - invalid */
13404
/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
{
    /* Decode only; dispatches to the common MMX optimized full,full worker. */
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
}
13411
13412
/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
{
    /* Decode only; dispatches to the common SSE2 optimized full,full worker. */
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
}
13419
13420
13421/* Opcode 0xf3 0x0f 0xe2 - invalid */
13422/* Opcode 0xf2 0x0f 0xe2 - invalid */
13423
/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
{
    /* Decode only; uses the optimized MMX+SSE worker (SSE-era MMX extension). */
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
}
13430
13431
13432/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13433FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13434{
13435 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13436 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13437}
13438
13439
13440/* Opcode 0xf3 0x0f 0xe3 - invalid */
13441/* Opcode 0xf2 0x0f 0xe3 - invalid */
13442
13443/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13444FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13445{
13446 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13447 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13448}
13449
13450
13451/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13452FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13453{
13454 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13455 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13456}
13457
13458
13459/* Opcode 0xf3 0x0f 0xe4 - invalid */
13460/* Opcode 0xf2 0x0f 0xe4 - invalid */
13461
/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq
 * PMULHW (packed multiply high, signed words), baseline MMX form. */
FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
}


/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx
 * PMULHW, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
}
13476
13477
13478/* Opcode 0xf3 0x0f 0xe5 - invalid */
13479/* Opcode 0xf2 0x0f 0xe5 - invalid */
13480/* Opcode 0x0f 0xe6 - invalid */
13481
13482
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd
 * Packed double -> dword conversion with truncation; the SSE2 FP worker
 * handles decoding, MXCSR and exception raising. */
FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
}


/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd
 * Packed dword -> double conversion. */
FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
}


/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd
 * Packed double -> dword conversion using the current rounding mode. */
FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
}
13505
13506
13507/**
13508 * @opcode 0xe7
13509 * @opcodesub !11 mr/reg
13510 * @oppfx none
13511 * @opcpuid sse
13512 * @opgroup og_sse1_cachect
13513 * @opxcpttype none
13514 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13515 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13516 */
13517FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13518{
13519 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13521 if (IEM_IS_MODRM_MEM_MODE(bRm))
13522 {
13523 /* Register, memory. */
13524 IEM_MC_BEGIN(0, 2);
13525 IEM_MC_LOCAL(uint64_t, uSrc);
13526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13527
13528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13530 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13531 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13532 IEM_MC_FPU_TO_MMX_MODE();
13533
13534 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13535 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13536
13537 IEM_MC_ADVANCE_RIP_AND_FINISH();
13538 IEM_MC_END();
13539 }
13540 /**
13541 * @opdone
13542 * @opmnemonic ud0fe7reg
13543 * @opcode 0xe7
13544 * @opcodesub 11 mr/reg
13545 * @oppfx none
13546 * @opunused immediate
13547 * @opcpuid sse
13548 * @optest ->
13549 */
13550 else
13551 IEMOP_RAISE_INVALID_OPCODE_RET();
13552}
13553
13554/**
13555 * @opcode 0xe7
13556 * @opcodesub !11 mr/reg
13557 * @oppfx 0x66
13558 * @opcpuid sse2
13559 * @opgroup og_sse2_cachect
13560 * @opxcpttype 1
13561 * @optest op1=-1 op2=2 -> op1=2
13562 * @optest op1=0 op2=-42 -> op1=-42
13563 */
13564FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13565{
13566 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13567 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13568 if (IEM_IS_MODRM_MEM_MODE(bRm))
13569 {
13570 /* Register, memory. */
13571 IEM_MC_BEGIN(0, 2);
13572 IEM_MC_LOCAL(RTUINT128U, uSrc);
13573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13574
13575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13577 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13578 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13579
13580 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13581 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13582
13583 IEM_MC_ADVANCE_RIP_AND_FINISH();
13584 IEM_MC_END();
13585 }
13586
13587 /**
13588 * @opdone
13589 * @opmnemonic ud660fe7reg
13590 * @opcode 0xe7
13591 * @opcodesub 11 mr/reg
13592 * @oppfx 0x66
13593 * @opunused immediate
13594 * @opcpuid sse
13595 * @optest ->
13596 */
13597 else
13598 IEMOP_RAISE_INVALID_OPCODE_RET();
13599}
13600
13601/* Opcode 0xf3 0x0f 0xe7 - invalid */
13602/* Opcode 0xf2 0x0f 0xe7 - invalid */
13603
13604
/** Opcode 0x0f 0xe8 - psubsb Pq, Qq
 * PSUBSB (packed subtract with signed saturation, bytes), MMX form. */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
}


/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx
 * PSUBSB, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
}


/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq
 * PSUBSW (packed subtract with signed saturation, words), MMX form. */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx
 * PSUBSW, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
}
13638
13639
13640/* Opcode 0xf3 0x0f 0xe9 - invalid */
13641/* Opcode 0xf2 0x0f 0xe9 - invalid */
13642
13643
/** Opcode 0x0f 0xea - pminsw Pq, Qq
 * PMINSW (packed signed word minimum) on MMX registers; introduced with the
 * SSE MMX extensions, hence the MmxSse worker. */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx
 * PMINSW, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
}


/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */


/** Opcode 0x0f 0xeb - por Pq, Qq
 * POR (bitwise OR), baseline MMX form. */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx
 * POR, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}
13678
13679
13680/* Opcode 0xf3 0x0f 0xeb - invalid */
13681/* Opcode 0xf2 0x0f 0xeb - invalid */
13682
/** Opcode 0x0f 0xec - paddsb Pq, Qq
 * PADDSB (packed add with signed saturation, bytes), MMX form. */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx
 * PADDSB, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq
 * PADDSW (packed add with signed saturation, words), MMX form. */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx
 * PADDSW, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
}
13716
13717
13718/* Opcode 0xf3 0x0f 0xed - invalid */
13719/* Opcode 0xf2 0x0f 0xed - invalid */
13720
13721
/** Opcode 0x0f 0xee - pmaxsw Pq, Qq
 * PMAXSW (packed signed word maximum) on MMX registers; introduced with the
 * SSE MMX extensions, hence the MmxSse worker. */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx
 * PMAXSW, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}


/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq
 * PXOR (bitwise XOR), baseline MMX form. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx
 * PXOR, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}
13756
13757
13758/* Opcode 0xf3 0x0f 0xef - invalid */
13759/* Opcode 0xf2 0x0f 0xef - invalid */
13760
13761/* Opcode 0x0f 0xf0 - invalid */
13762/* Opcode 0x66 0x0f 0xf0 - invalid */
13763
13764
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx
 * LDDQU (SSE3): unaligned 128-bit load into an XMM register.  Memory operand
 * only; the register encoding raises \#UD. */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* SSE3 feature gate; note the plain (non-aligned) U128 fetch - LDDQU
           has no alignment requirement. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
13797
13798
/** Opcode 0x0f 0xf1 - psllw Pq, Qq
 * PSLLW (packed shift left logical, words), MMX form. */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx
 * PSLLW, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq
 * PSLLD (packed shift left logical, dwords), MMX form. */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx
 * PSLLD, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq
 * PSLLQ (packed shift left logical, qword), MMX form. */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx
 * PSLLQ, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}
13849
13850/* Opcode 0xf2 0x0f 0xf3 - invalid */
13851
13852/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13853FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13854{
13855 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13856 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
13857}
13858
13859
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx
 * PMULUDQ, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
}
13866
13867
13868/* Opcode 0xf2 0x0f 0xf4 - invalid */
13869
/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq
 * PMADDWD (multiply and add packed words), baseline MMX form. */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx
 * PMADDWD, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}

/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq
 * PSADBW (sum of absolute byte differences) on MMX registers; introduced
 * with the SSE MMX extensions, hence the MmxSse worker. */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx
 * PSADBW, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}
13901
13902
13903/* Opcode 0xf2 0x0f 0xf6 - invalid */
13904
/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq
 * NOTE(review): still an unimplemented stub (raises assertion/not-implemented). */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq
 * NOTE(review): still an unimplemented stub. */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13909/* Opcode 0xf2 0x0f 0xf7 - invalid */
13910
13911
/** Opcode 0x0f 0xf8 - psubb Pq, Qq
 * PSUBB (packed subtract, bytes), baseline MMX form. */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx
 * PSUBB, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq
 * PSUBW (packed subtract, words), baseline MMX form. */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx
 * PSUBW, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}
13945
13946
13947/* Opcode 0xf2 0x0f 0xf9 - invalid */
13948
13949
/** Opcode 0x0f 0xfa - psubd Pq, Qq
 * PSUBD (packed subtract, dwords), baseline MMX form. */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx
 * PSUBD, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq
 * PSUBQ (packed subtract, qword) on MMX registers; an SSE2-introduced
 * instruction, so the worker variant that checks the SSE2 feature is used. */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx
 * PSUBQ, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}
13983
13984
13985/* Opcode 0xf2 0x0f 0xfb - invalid */
13986
13987
/** Opcode 0x0f 0xfc - paddb Pq, Qq
 * PADDB (packed add, bytes), baseline MMX form. */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx
 * PADDB, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq
 * PADDW (packed add, words), baseline MMX form. */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx
 * PADDW, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}
14021
14022
14023/* Opcode 0xf2 0x0f 0xfd - invalid */
14024
14025
/** Opcode 0x0f 0xfe - paddd Pq, Qq
 * PADDD (packed add, dwords), baseline MMX form. */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx
 * PADDD, SSE2 form on XMM registers. */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}
14040
14041
14042/* Opcode 0xf2 0x0f 0xfe - invalid */
14043
14044
/** Opcode **** 0x0f 0xff - UD0
 * Guaranteed-undefined opcode; always raises \#UD.  On Intel CPUs a ModRM
 * byte (plus any addressing bytes) is consumed first, which affects the
 * reported instruction length. */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        /* Intel decodes (and skips) the ModRM operand bytes before faulting. */
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
14058
14059
14060
14061/**
14062 * Two byte opcode map, first byte 0x0f.
14063 *
14064 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
14065 * check if it needs updating as well when making changes.
14066 */
14067const PFNIEMOP g_apfnTwoByteMap[] =
14068{
14069 /* no prefix, 066h prefix f3h prefix, f2h prefix */
14070 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
14071 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
14072 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
14073 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
14074 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
14075 /* 0x05 */ IEMOP_X4(iemOp_syscall),
14076 /* 0x06 */ IEMOP_X4(iemOp_clts),
14077 /* 0x07 */ IEMOP_X4(iemOp_sysret),
14078 /* 0x08 */ IEMOP_X4(iemOp_invd),
14079 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
14080 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
14081 /* 0x0b */ IEMOP_X4(iemOp_ud2),
14082 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
14083 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
14084 /* 0x0e */ IEMOP_X4(iemOp_femms),
14085 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
14086
14087 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
14088 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
14089 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
14090 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14091 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14092 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14093 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
14094 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14095 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
14096 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
14097 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
14098 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
14099 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
14100 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
14101 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
14102 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
14103
14104 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
14105 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
14106 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
14107 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
14108 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
14109 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14110 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
14111 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14112 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14113 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14114 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
14115 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14116 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
14117 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
14118 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14119 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14120
14121 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
14122 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
14123 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
14124 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
14125 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
14126 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
14127 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
14128 /* 0x37 */ IEMOP_X4(iemOp_getsec),
14129 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
14130 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14131 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
14132 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14133 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14134 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14135 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14136 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14137
14138 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
14139 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
14140 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
14141 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
14142 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
14143 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
14144 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
14145 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
14146 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
14147 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
14148 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
14149 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
14150 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
14151 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
14152 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
14153 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
14154
14155 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14156 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
14157 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
14158 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
14159 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14160 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14161 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14162 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14163 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
14164 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
14165 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
14166 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
14167 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
14168 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
14169 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
14170 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
14171
14172 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14173 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14174 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14175 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14176 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14177 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14178 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14179 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14180 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14181 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14182 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14183 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14184 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14185 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14186 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14187 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
14188
14189 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
14190 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
14191 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
14192 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
14193 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14194 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14195 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14196 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14197
14198 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14199 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14200 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14201 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14202 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
14203 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
14204 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
14205 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
14206
14207 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
14208 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
14209 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
14210 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
14211 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14212 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14213 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14214 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14215 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14216 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14217 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14218 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14219 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14220 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14221 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14222 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14223
14224 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14225 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14226 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14227 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14228 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14229 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14230 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14231 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14232 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14233 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14234 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14235 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14236 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14237 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14238 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14239 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14240
14241 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14242 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14243 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14244 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14245 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14246 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14247 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14248 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14249 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14250 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14251 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14252 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14253 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14254 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14255 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14256 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14257
14258 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14259 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14260 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14261 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14262 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14263 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14264 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14265 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14266 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14267 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14268 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14269 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14270 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14271 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14272 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14273 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14274
14275 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14276 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14277 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14278 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14279 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14280 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14281 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14282 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14283 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14284 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14285 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14286 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14287 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14288 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14289 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14290 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14291
14292 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14293 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14294 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14295 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14296 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14297 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14298 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14299 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14300 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14301 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14302 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14303 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14304 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14305 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14306 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14307 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14308
14309 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14310 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14311 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14312 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14313 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14314 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14315 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14316 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14317 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14318 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14319 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14320 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14321 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14322 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14323 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14324 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14325
14326 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14327 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14328 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14329 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14330 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14331 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14332 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14333 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14334 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14335 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14336 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14337 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14338 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14339 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14340 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14341 /* 0xff */ IEMOP_X4(iemOp_ud0),
14342};
14343AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
14344
14345/** @} */
14346
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette