VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 99748

Last change on this file since 99748 was 99337, checked in by vboxsync, 21 months ago

VMM/IEM: IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT -> IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT, since the CPUID check was removed they are identical. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 496.9 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 99337 2023-04-07 12:33:48Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name Two byte opcodes (first byte 0x0f).
33 *
34 * @{
35 */
36
37
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * @param   pfnU64  Assembly worker taking a (uint64_t *pDst, uint64_t const *pSrc)
 *                  pair; invoked via IEM_MC_CALL_MMX_AIMPL_2, so it also gets the
 *                  FPU/FXSAVE state (PFNIEMAIMPLMEDIAF2U64).
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();   /* enter MMX mode before touching MMX registers */

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);  /* flag the destination MMX register as modified */

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address is decoded before the done-decoding check, then the
           source is fetched before any FPU state preparation. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
95
96
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands (hence IEM_MC_CALL_VOID_AIMPL_2).
 *
 * @param   pfnU64  Assembly worker taking just (uint64_t *pDst, uint64_t const *pSrc)
 *                  (PFNIEMAIMPLMEDIAOPTF2U64).
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);   /* no FXSAVE state passed */
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
157
158
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * CPUID gate: either SSE or the AMD MMX extensions (fSse OR fAmdMmxExts).
 *
 * @param   pfnU64  Assembly worker invoked via IEM_MC_CALL_MMX_AIMPL_2, i.e.
 *                  with the FXSAVE state (PFNIEMAIMPLMEDIAF2U64).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
217
218
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands (hence IEM_MC_CALL_VOID_AIMPL_2).
 *
 * CPUID gate: either SSE or the AMD MMX extensions (fSse OR fAmdMmxExts).
 *
 * @param   pfnU64  Assembly worker taking just (uint64_t *pDst, uint64_t const *pSrc)
 *                  (PFNIEMAIMPLMEDIAOPTF2U64).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
280
281
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
 *
 * CPUID gate: SSE2 (fSse2) rather than plain MMX.
 *
 * @param   pfnU64  Assembly worker invoked via IEM_MC_CALL_MMX_AIMPL_2, i.e.
 *                  with the FXSAVE state (PFNIEMAIMPLMEDIAF2U64).
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
340
341
/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced
 * (IEM_MC_FETCH_MEM_U128_ALIGN_SSE in the memory form).
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @param   pfnU128 Assembly worker invoked via IEM_MC_CALL_SSE_AIMPL_2, i.e.
 *                  with the FXSAVE state (PFNIEMAIMPLMEDIAF2U128).
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
395
396
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced
 * (IEM_MC_FETCH_MEM_U128_ALIGN_SSE in the memory form).
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @param   pfnU128 Assembly worker invoked via IEM_MC_CALL_SSE_AIMPL_2, i.e.
 *                  with the FXSAVE state (PFNIEMAIMPLMEDIAF2U128).
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
450
451
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced
 * (IEM_MC_FETCH_MEM_U128_ALIGN_SSE in the memory form).
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands (hence IEM_MC_CALL_VOID_AIMPL_2).
 *
 * @param   pfnU128 Assembly worker taking just (PRTUINT128U pDst, PCRTUINT128U pSrc)
 *                  (PFNIEMAIMPLMEDIAOPTF2U128).
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
508
509
/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access (fetched zero-extended to 64 bits here).
 *
 * @param   pfnU64  Assembly worker taking just (uint64_t *puDst, uint64_t const *puSrc)
 *                  (PFNIEMAIMPLMEDIAOPTF2U64).
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* 32-bit read, zero-extended */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
568
569
/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
 *
 * Exceptions type 4.
 *
 * @param   pfnU128 Assembly worker taking just (PRTUINT128U puDst, PCRTUINT128U puSrc)
 *                  (PFNIEMAIMPLMEDIAOPTF2U128).
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
628
629
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
 *
 * Exceptions type 4.
 *
 * @param   pfnU128 Assembly worker taking just (PRTUINT128U puDst, PCRTUINT128U puSrc)
 *                  (PFNIEMAIMPLMEDIAOPTF2U128).
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
688
689
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 *
 * @param   pfnU64  Assembly worker taking just (uint64_t *puDst, uint64_t const *puSrc)
 *                  (PFNIEMAIMPLMEDIAOPTF2U64).
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
750
751
/**
 * Common worker for SSE instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 *
 * @param   pfnU128 Assembly worker taking just (PRTUINT128U puDst, PCRTUINT128U puSrc)
 *                  (PFNIEMAIMPLMEDIAOPTF2U128).
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
810
811
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced
 * (IEM_MC_FETCH_MEM_XMM_ALIGN_SSE in the memory form).
 * Exceptions type 2. SSE cpuid checks.
 *
 * The result is stored first and unmasked SIMD FP exceptions are checked
 * afterwards (IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT).
 *
 * @param   pfnU128 Assembly worker producing an IEMSSERESULT from two XMM
 *                  source operands (PFNIEMAIMPLFPSSEF2U128).
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
874
875
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced for the register form;
 * the memory form does a plain 32-bit read (IEM_MC_FETCH_MEM_R32, no
 * alignment macro - scalar 32-bit accesses have no 16-byte requirement).
 * Exceptions type 2. SSE cpuid checks.
 *
 * The result is stored first and unmasked SIMD FP exceptions are checked
 * afterwards (IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT).
 *
 * @param   pfnU128_R32 Assembly worker producing an IEMSSERESULT from an XMM
 *                      source and a 32-bit float source (PFNIEMAIMPLFPSSEF2U128R32).
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
938
939
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced
 * (IEM_MC_FETCH_MEM_XMM_ALIGN_SSE in the memory form).
 * Exceptions type 2. SSE cpuid checks.
 *
 * The result is stored first and unmasked SIMD FP exceptions are checked
 * afterwards (IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT).
 *
 * @param   pfnU128 Assembly worker producing an IEMSSERESULT from two XMM
 *                  source operands (PFNIEMAIMPLFPSSEF2U128).
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1002
1003
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxs xmm1, xmm2/mem64
 *
 * NOTE(review): the memory form fetches only 64 bits via IEM_MC_FETCH_MEM_R64
 * and performs no 128-bit alignment check, so the old "proper alignment of the
 * 128-bit operand is enforced / exceptions type 2" claim only matches the
 * register form; the scalar memory form looks like exceptions type 3 — confirm
 * against the SDM before relying on this.
 * SSE2 cpuid checks.
 *
 * @param   pfnU128_R64     Assembly worker taking a full 128-bit first source
 *                          and a 64-bit (double-precision) second source.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes,        SseRes,     0);
        IEM_MC_ARG(PCX86XMMREG,             pSrc1,                      1);
        IEM_MC_ARG(PCRTFLOAT64U,            pSrc2,                      2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Only the low 64 bits of the second register operand are used. */
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
        IEM_MC_LOCAL(RTFLOAT64U,            r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes,        SseRes,     0);
        IEM_MC_ARG(PCX86XMMREG,             pSrc1,                      1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Src2,       r64Src2,    2);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1066
1067
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 *
 * @param   pfnU128     Assembly worker operating on two 128-bit operands
 *                      (destination is read-modify-write).
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U,          puDst, 0);
        IEM_MC_ARG(PCRTUINT128U,         puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst,      IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,                 puDst,       0);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1126
1127
/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * Structurally identical to iemOpCommonSse2Fp_FullFull_To_Full except for the
 * cpuid feature gate (fSse3 instead of fSse2).
 *
 * @param   pfnU128     Assembly worker taking the MXCSR/result container and
 *                      two full 128-bit source operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes,        SseRes,     0);
        IEM_MC_ARG(PCX86XMMREG,             pSrc1,                      1);
        IEM_MC_ARG(PCX86XMMREG,             pSrc2,                      2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
        IEM_MC_LOCAL(X86XMMREG,             uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes,        SseRes,     0);
        IEM_MC_ARG(PCX86XMMREG,             pSrc1,                      1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG,   pSrc2,          uSrc2,      2);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1190
1191
/** Opcode 0x0f 0x00 /0. SLDT - store local descriptor table register.
 * Register destination honours the operand size; memory destination is always
 * a 16-bit store, so the memory path ignores the operand-size prefix. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg,               0);
    IEM_MC_ARG(RTGCPTR,  GCPtrEffDst,           1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1215
1216
/** Opcode 0x0f 0x00 /1. STR - store task register.
 * Mirrors iemOp_Grp6_sldt: operand-size matters only for the register form. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();


    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg,               0);
    IEM_MC_ARG(RTGCPTR,  GCPtrEffDst,           1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1241
1242
/** Opcode 0x0f 0x00 /2. LLDT - load local descriptor table register.
 * Privilege and descriptor validation is done in iemCImpl_lldt. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}
1272
1273
/** Opcode 0x0f 0x00 /3. LTR - load task register.
 * Same shape as lldt; note the register branch uses the plain lock-prefix
 * check rather than the DECODED_NL variant used by the siblings. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}
1303
1304
/** Opcode 0x0f 0x00 /4 and /5.  Common worker for VERR (/4) and VERW (/5);
 * the old comment wrongly said "/3", which is ltr.
 * @param bRm     The ModR/M byte.
 * @param fWrite  true for VERW (verify write access), false for VERR.
 * NOTE(review): the register branch passes IEMOPFORM_M_REG's sibling
 * IEMOPFORM_M_MEM to IEMOP_HLP_DECODED_NL_1 — looks like a copy/paste from
 * the memory branch; affects decode verification metadata only — confirm. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t,    u16Sel,            0);
        IEM_MC_ARG_CONST(bool,  fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t,    u16Sel,            0);
        IEM_MC_ARG_CONST(bool,  fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
}
1334
1335
/** Opcode 0x0f 0x00 /4. VERR - verify segment for reading. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. VERW - verify segment for writing. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}
1352
1353
/**
 * Group 6 jump table, indexed by the ModR/M reg field (/0../7).
 * /6 and /7 are undefined and route to the invalid-opcode handler.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00 - dispatches on the ModR/M reg field via g_apfnGroup6. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
1375
1376
/** Opcode 0x0f 0x01 /0 (memory form). SGDT - store GDTR to memory.
 * NOTE(review): iEffSeg is uint8_t here while the Grp6 workers use uint16_t
 * for the same value — harmless (segment index is 0..5) but inconsistent. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
1392
1393
/** Opcode 0x0f 0x01 /0 (register form, rm=1). VMCALL - VMX hypercall. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}
1406
1407
/** Opcode 0x0f 0x01 /0 (register form, rm=2). VMLAUNCH.
 * Only available with nested VMX support compiled in; otherwise a stub
 * raising \#UD. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0 (register form, rm=3). VMRESUME.
 * Stubbed to \#UD when nested VMX is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0 (register form, rm=4). VMXOFF - leave VMX operation.
 * Stubbed to \#UD when nested VMX is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1463
1464
/** Opcode 0x0f 0x01 /1 (memory form). SIDT - store IDTR to memory. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
1480
1481
/** Opcode 0x0f 0x01 0xc8. MONITOR - the effective segment is passed on so the
 * C implementation can resolve the address in RAX/EAX. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 0xc9. MWAIT. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
1498
1499
/** Opcode 0x0f 0x01 /2 (memory form). LGDT - load GDTR from memory.
 * The effective operand size is forwarded so iemCImpl_lgdt can pick the
 * 24/32/64-bit base width. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}
1515
1516
/** Opcode 0x0f 0x01 0xd0. XGETBV - read an extended control register.
 * Raises \#UD unless the guest CPU profile advertises XSAVE/XRSTOR. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. XSETBV - write an extended control register.
 * Same feature gate and prefix handling as xgetbv above. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1547
1548
/** Opcode 0x0f 0x01 /3 (memory form). LIDT - load IDTR from memory.
 * In 64-bit mode the operand size is forced to 64-bit regardless of prefixes,
 * hence the explicit enmCpuMode check instead of IEMOP_HLP_64BIT_OP_SIZE(). */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/enmEffOpSize,          2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}
1566
1567
/*
 * AMD SVM group 7 opcodes (0x0f 0x01 0xd8..0xdf).  Each instruction defers to
 * its C implementation when nested SVM support is compiled in; otherwise it is
 * a \#UD stub.  VMMCALL is the exception: it is always available so GIM can
 * service hypercalls regardless of build configuration.
 */

/** Opcode 0x0f 0x01 0xd8. VMRUN. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. VMMCALL - hypercall, always present (see note). */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. VMLOAD. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. VMSAVE. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. STGI - set global interrupt flag. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. CLGI - clear global interrupt flag. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. INVLPGA. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. SKINIT. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif
1669
1670
/** Opcode 0x0f 0x01 /4. SMSW - store machine status word (low CR0).
 * Register destination honours the operand size; memory form is always a
 * 16-bit store. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg,               0);
    IEM_MC_ARG(RTGCPTR,  GCPtrEffDst,           1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1692
1693
/** Opcode 0x0f 0x01 /6. LMSW - load machine status word into CR0.
 * The register form passes NIL_RTGCPTR as the (unused) effective address so
 * both forms share iemCImpl_lmsw's signature. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t,        u16Tmp,                     0);
        IEM_MC_ARG_CONST(RTGCPTR,   GCPtrEffDst, NIL_RTGCPTR,   1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t,        u16Tmp,                     0);
        IEM_MC_ARG(RTGCPTR,         GCPtrEffDst,                1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
}
1723
1724
/** Opcode 0x0f 0x01 /7 (memory form). INVLPG - invalidate TLB entry.
 * Only the effective address is needed; no memory access is performed here. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
}
1737
1738
/** Opcode 0x0f 0x01 0xf8. SWAPGS - 64-bit mode only. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 0xf9. RDTSCP - read TSC and the TSC_AUX MSR. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}
1756
1757
/**
 * Group 7 jump table, memory variant.  Indexed by the ModR/M reg field; the
 * register encodings (vmcall, monitor, xgetbv, the SVM group, swapgs, ...)
 * are handled separately in iemOp_Grp7 below.  /5 is undefined for memory
 * operands.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};
1772
1773
/** Opcode 0x0f 0x01.  Group 7 dispatcher: memory forms go through the jump
 * table above; register forms are decoded by (reg, rm) pairs below.
 * Unlisted (reg, rm) combinations raise \#UD. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: /* VMX encodings. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* monitor/mwait. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* xgetbv/xsetbv. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* AMD SVM encodings - all eight rm values are defined. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* smsw - register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* lmsw - register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* swapgs/rdtscp. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1843
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03): Gv,Ew.
 * @param fIsLar  true for LAR, false for LSL.
 * NOTE(review): the case blocks appear to rely on IEM_MC_END() incorporating
 * the return, so the 16-bit case does not fall through to the 32/64-bit one —
 * confirm against the IEM_MC_END definition before restructuring. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,               0);
                IEM_MC_ARG(uint16_t,        u16Sel,                1);
                IEM_MC_ARG_CONST(bool,      fIsLarArg, fIsLar,     2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
            }

            /* 32-bit and 64-bit share the 64-bit C worker; it handles the
               zero-extension of the 32-bit destination. */
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,               0);
                IEM_MC_ARG(uint16_t,        u16Sel,                1);
                IEM_MC_ARG_CONST(bool,      fIsLarArg, fIsLar,     2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,               0);
                IEM_MC_ARG(uint16_t,        u16Sel,                1);
                IEM_MC_ARG_CONST(bool,      fIsLarArg, fIsLar,     2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,               0);
                IEM_MC_ARG(uint16_t,        u16Sel,                1);
                IEM_MC_ARG_CONST(bool,      fIsLarArg, fIsLar,     2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1933
1934
1935
1936/** Opcode 0x0f 0x02. */
1937FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1938{
1939 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1940 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1941}
1942
1943
1944/** Opcode 0x0f 0x03. */
1945FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1946{
1947 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1948 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1949}
1950
1951
1952/** Opcode 0x0f 0x05. */
1953FNIEMOP_DEF(iemOp_syscall)
1954{
1955 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1957 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1958}
1959
1960
1961/** Opcode 0x0f 0x06. */
1962FNIEMOP_DEF(iemOp_clts)
1963{
1964 IEMOP_MNEMONIC(clts, "clts");
1965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1966 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1967}
1968
1969
1970/** Opcode 0x0f 0x07. */
1971FNIEMOP_DEF(iemOp_sysret)
1972{
1973 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1975 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1976}
1977
1978
1979/** Opcode 0x0f 0x08. */
1980FNIEMOP_DEF(iemOp_invd)
1981{
1982 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1983 IEMOP_HLP_MIN_486();
1984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1985 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
1986}
1987
1988
1989/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    /* Write back and invalidate caches; requires a 486 or later. */
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}
1997
1998
1999/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    /* UD2 is the architecturally defined invalid opcode; always raise \#UD. */
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
2005
2006/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register forms of the prefetch group are invalid; only memory operands make sense. */
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* The /r reg field only selects the mnemonic for disassembly/statistics;
       all encodings behave the same (a NOP) below. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address (consuming any remaining operand bytes),
       but perform no access: the prefetch hint is currently a NOP. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2045
2046
2047/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    /* FEMMS (AMD): fast exit from MMX state, leaving the FPU usable again. */
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2061
2062
2063/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* 3DNow! instructions all share opcode 0x0f 0x0f; the actual operation
       is selected by a trailing suffix byte fetched below. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
2081
2082
2083/**
2084 * @opcode 0x10
2085 * @oppfx none
2086 * @opcpuid sse
2087 * @opgroup og_sse_simdfp_datamove
2088 * @opxcpttype 4UA
2089 * @optest op1=1 op2=2 -> op1=2
2090 * @optest op1=0 op2=-22 -> op1=-22
2091 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128: plain full 128-bit register copy.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128]: unaligned fetch (no ALIGN_SSE variant - movups permits
         * unaligned accesses), then store into the destination XMM register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

}
2132
2133
2134/**
2135 * @opcode 0x10
2136 * @oppfx 0x66
2137 * @opcpuid sse2
2138 * @opgroup og_sse2_pcksclr_datamove
2139 * @opxcpttype 4UA
2140 * @optest op1=1 op2=2 -> op1=2
2141 * @optest op1=0 op2=-42 -> op1=-42
2142 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128: plain full 128-bit register copy (SSE2 gated).
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128]: unaligned fetch - movupd permits unaligned accesses.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2182
2183
2184/**
2185 * @opcode 0x10
2186 * @oppfx 0xf3
2187 * @opcpuid sse
2188 * @opgroup og_sse_simdfp_datamove
2189 * @opxcpttype 5
2190 * @optest op1=1 op2=2 -> op1=2
2191 * @optest op1=0 op2=-22 -> op1=-22
2192 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32: only the low dword moves; the destination's upper
         * 96 bits are left untouched (register-to-register movss semantics).
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32]: memory form zero-extends the dword through bit 127.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2235
2236
2237/**
2238 * @opcode 0x10
2239 * @oppfx 0xf2
2240 * @opcpuid sse2
2241 * @opgroup og_sse2_pcksclr_datamove
2242 * @opxcpttype 5
2243 * @optest op1=1 op2=2 -> op1=2
2244 * @optest op1=0 op2=-42 -> op1=-42
2245 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64: only the low qword moves; the destination's upper
         * qword is left untouched (register-to-register movsd semantics).
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem64]: memory form zero-extends the qword through bit 127.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2288
2289
2290/**
2291 * @opcode 0x11
2292 * @oppfx none
2293 * @opcpuid sse
2294 * @opgroup og_sse_simdfp_datamove
2295 * @opxcpttype 4UA
2296 * @optest op1=1 op2=2 -> op1=2
2297 * @optest op1=0 op2=-42 -> op1=-42
2298 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128: store direction, so rm is destination and reg is source.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM128: unaligned 128-bit store; only reads guest SSE state.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2338
2339
2340/**
2341 * @opcode 0x11
2342 * @oppfx 0x66
2343 * @opcpuid sse2
2344 * @opgroup og_sse2_pcksclr_datamove
2345 * @opxcpttype 4UA
2346 * @optest op1=1 op2=2 -> op1=2
2347 * @optest op1=0 op2=-42 -> op1=-42
2348 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128: store direction, so rm is destination and reg is source.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM128: unaligned 128-bit store; only reads guest SSE state.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2388
2389
2390/**
2391 * @opcode 0x11
2392 * @oppfx 0xf3
2393 * @opcpuid sse
2394 * @opgroup og_sse_simdfp_datamove
2395 * @opxcpttype 5
2396 * @optest op1=1 op2=2 -> op1=2
2397 * @optest op1=0 op2=-22 -> op1=-22
2398 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32: store direction - only the low dword of the rm
         * register is overwritten; its upper 96 bits are preserved.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem32], XMM32: plain 32-bit store; only reads guest SSE state.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2441
2442
2443/**
2444 * @opcode 0x11
2445 * @oppfx 0xf2
2446 * @opcpuid sse2
2447 * @opgroup og_sse2_pcksclr_datamove
2448 * @opxcpttype 5
2449 * @optest op1=1 op2=2 -> op1=2
2450 * @optest op1=0 op2=-42 -> op1=-42
2451 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64: store direction - only the low qword of the rm
         * register is overwritten; its high qword is preserved.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], XMM64: plain 64-bit store; only reads guest SSE state.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2494
2495
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    /* Opcode 0x12 without prefix decodes to two different instructions
       depending on the mod bits: MOVHLPS (reg,reg) or MOVLPS (reg,mem). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* MOVHLPS: source's high qword -> destination's low qword. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* MOVLPS: load qword from memory into the destination's low qword,
           leaving the high qword untouched. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2557
2558
2559/**
2560 * @opcode 0x12
2561 * @opcodesub !11 mr/reg
2562 * @oppfx 0x66
2563 * @opcpuid sse2
2564 * @opgroup og_sse2_pcksclr_datamove
2565 * @opxcpttype 5
2566 * @optest op1=1 op2=2 -> op1=2
2567 * @optest op1=0 op2=-42 -> op1=-42
2568 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* Load qword from memory into the destination's low qword,
           leaving the high qword untouched. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
2605
2606
2607/**
2608 * @opcode 0x12
2609 * @oppfx 0xf3
2610 * @opcpuid sse3
2611 * @opgroup og_sse3_pcksclr_datamove
2612 * @opxcpttype 4
2613 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2614 * op1=0x00000002000000020000000100000001
2615 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM: duplicate the even-indexed source dwords:
         * dst = { src[0], src[0], src[2], src[2] }.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128]: same dword duplication from an aligned 128-bit load.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2665
2666
2667/**
2668 * @opcode 0x12
2669 * @oppfx 0xf2
2670 * @opcpuid sse3
2671 * @opgroup og_sse3_pcksclr_datamove
2672 * @opxcpttype 5
2673 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2674 * op1=0x22222222111111112222222211111111
2675 */
2676FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2677{
2678 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2680 if (IEM_IS_MODRM_REG_MODE(bRm))
2681 {
2682 /*
2683 * XMM128, XMM64.
2684 */
2685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2686 IEM_MC_BEGIN(1, 0);
2687 IEM_MC_ARG(uint64_t, uSrc, 0);
2688
2689 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2690 IEM_MC_PREPARE_SSE_USAGE();
2691
2692 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2693 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2694 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2695
2696 IEM_MC_ADVANCE_RIP_AND_FINISH();
2697 IEM_MC_END();
2698 }
2699 else
2700 {
2701 /*
2702 * XMM128, [mem64].
2703 */
2704 IEM_MC_BEGIN(1, 1);
2705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2706 IEM_MC_ARG(uint64_t, uSrc, 0);
2707
2708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2710 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2711 IEM_MC_PREPARE_SSE_USAGE();
2712
2713 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2714 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2715 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2716
2717 IEM_MC_ADVANCE_RIP_AND_FINISH();
2718 IEM_MC_END();
2719 }
2720}
2721
2722
2723/**
2724 * @opcode 0x13
2725 * @opcodesub !11 mr/reg
2726 * @oppfx none
2727 * @opcpuid sse
2728 * @opgroup og_sse_simdfp_datamove
2729 * @opxcpttype 5
2730 * @optest op1=1 op2=2 -> op1=2
2731 * @optest op1=0 op2=-42 -> op1=-42
2732 */
2733FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2734{
2735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2736 if (IEM_IS_MODRM_MEM_MODE(bRm))
2737 {
2738 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2739
2740 IEM_MC_BEGIN(0, 2);
2741 IEM_MC_LOCAL(uint64_t, uSrc);
2742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2743
2744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2746 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2747 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2748
2749 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2750 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2751
2752 IEM_MC_ADVANCE_RIP_AND_FINISH();
2753 IEM_MC_END();
2754 }
2755
2756 /**
2757 * @opdone
2758 * @opmnemonic ud0f13m3
2759 * @opcode 0x13
2760 * @opcodesub 11 mr/reg
2761 * @oppfx none
2762 * @opunused immediate
2763 * @opcpuid sse
2764 * @optest ->
2765 */
2766 else
2767 return IEMOP_RAISE_INVALID_OPCODE();
2768}
2769
2770
2771/**
2772 * @opcode 0x13
2773 * @opcodesub !11 mr/reg
2774 * @oppfx 0x66
2775 * @opcpuid sse2
2776 * @opgroup og_sse2_pcksclr_datamove
2777 * @opxcpttype 5
2778 * @optest op1=1 op2=2 -> op1=2
2779 * @optest op1=0 op2=-42 -> op1=-42
2780 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        /* Store the source register's low qword to memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic ud660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
2816
2817
2818/**
2819 * @opmnemonic udf30f13
2820 * @opcode 0x13
2821 * @oppfx 0xf3
2822 * @opunused intel-modrm
2823 * @opcpuid sse
2824 * @optest ->
2825 * @opdone
2826 */
2827
2828/**
2829 * @opmnemonic udf20f13
2830 * @opcode 0x13
2831 * @oppfx 0xf2
2832 * @opunused intel-modrm
2833 * @opcpuid sse
2834 * @optest ->
2835 * @opdone
2836 */
2837
2838/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
{
    /* Interleave the low halves; dispatched to the common SSE low-low worker. */
    IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
}
2844
2845
2846/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
{
    /* Interleave the low halves; dispatched to the common SSE2 low-low worker. */
    IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
}
2852
2853
2854/**
2855 * @opdone
2856 * @opmnemonic udf30f14
2857 * @opcode 0x14
2858 * @oppfx 0xf3
2859 * @opunused intel-modrm
2860 * @opcpuid sse
2861 * @optest ->
2862 * @opdone
2863 */
2864
2865/**
2866 * @opmnemonic udf20f14
2867 * @opcode 0x14
2868 * @oppfx 0xf2
2869 * @opunused intel-modrm
2870 * @opcpuid sse
2871 * @optest ->
2872 * @opdone
2873 */
2874
2875/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
{
    /* Interleave the high halves; dispatched to the common SSE high-high worker. */
    IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
}
2881
2882
2883/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
{
    /* Interleave the high halves; dispatched to the common SSE2 high-high worker. */
    IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
}
2889
2890
2891/* Opcode 0xf3 0x0f 0x15 - invalid */
2892/* Opcode 0xf2 0x0f 0x15 - invalid */
2893
2894/**
2895 * @opdone
2896 * @opmnemonic udf30f15
2897 * @opcode 0x15
2898 * @oppfx 0xf3
2899 * @opunused intel-modrm
2900 * @opcpuid sse
2901 * @optest ->
2902 * @opdone
2903 */
2904
2905/**
2906 * @opmnemonic udf20f15
2907 * @opcode 0x15
2908 * @oppfx 0xf2
2909 * @opunused intel-modrm
2910 * @opcpuid sse
2911 * @optest ->
2912 * @opdone
2913 */
2914
FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    /* Opcode 0x16 without prefix decodes to two different instructions
       depending on the mod bits: MOVLHPS (reg,reg) or MOVHPS (reg,mem). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* MOVLHPS: source's low qword -> destination's high qword. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* MOVHPS: load qword from memory into the destination's high qword,
           leaving the low qword untouched. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2976
2977
2978/**
2979 * @opcode 0x16
2980 * @opcodesub !11 mr/reg
2981 * @oppfx 0x66
2982 * @opcpuid sse2
2983 * @opgroup og_sse2_pcksclr_datamove
2984 * @opxcpttype 5
2985 * @optest op1=1 op2=2 -> op1=2
2986 * @optest op1=0 op2=-42 -> op1=-42
2987 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        /* Load qword from memory into the destination's high qword,
           leaving the low qword untouched. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic ud660f16m3
     * @opcode 0x16
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
3023
3024
3025/**
3026 * @opcode 0x16
3027 * @oppfx 0xf3
3028 * @opcpuid sse3
3029 * @opgroup og_sse3_pcksclr_datamove
3030 * @opxcpttype 4
3031 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3032 * op1=0x00000002000000020000000100000001
3033 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128: duplicate the odd-indexed source dwords:
         * dst = { src[1], src[1], src[3], src[3] }.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128]: same dword duplication from an aligned 128-bit load.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3083
3084/**
3085 * @opdone
3086 * @opmnemonic udf30f16
3087 * @opcode 0x16
3088 * @oppfx 0xf2
3089 * @opunused intel-modrm
3090 * @opcpuid sse
3091 * @optest ->
3092 * @opdone
3093 */
3094
3095
3096/**
3097 * @opcode 0x17
3098 * @opcodesub !11 mr/reg
3099 * @oppfx none
3100 * @opcpuid sse
3101 * @opgroup og_sse_simdfp_datamove
3102 * @opxcpttype 5
3103 * @optest op1=1 op2=2 -> op1=2
3104 * @optest op1=0 op2=-42 -> op1=-42
3105 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    /* Stores the high quadword of an XMM register to a 64-bit memory operand.
       The register form (mod=11) is an invalid encoding and raises #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address must be calculated before decoding completes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Qword 1 = bits 127:64 of the source XMM register. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud0f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
3142
3143
3144/**
3145 * @opcode 0x17
3146 * @opcodesub !11 mr/reg
3147 * @oppfx 0x66
3148 * @opcpuid sse2
3149 * @opgroup og_sse2_pcksclr_datamove
3150 * @opxcpttype 5
3151 * @optest op1=1 op2=2 -> op1=2
3152 * @optest op1=0 op2=-42 -> op1=-42
3153 */
3154FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3155{
3156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3157 if (IEM_IS_MODRM_MEM_MODE(bRm))
3158 {
3159 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3160
3161 IEM_MC_BEGIN(0, 2);
3162 IEM_MC_LOCAL(uint64_t, uSrc);
3163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3164
3165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3167 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3168 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3169
3170 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3171 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3172
3173 IEM_MC_ADVANCE_RIP_AND_FINISH();
3174 IEM_MC_END();
3175 }
3176
3177 /**
3178 * @opdone
3179 * @opmnemonic ud660f17m3
3180 * @opcode 0x17
3181 * @opcodesub 11 mr/reg
3182 * @oppfx 0x66
3183 * @opunused immediate
3184 * @opcpuid sse
3185 * @optest ->
3186 */
3187 else
3188 return IEMOP_RAISE_INVALID_OPCODE();
3189}
3190
3191
3192/**
3193 * @opdone
3194 * @opmnemonic udf30f17
3195 * @opcode 0x17
3196 * @oppfx 0xf3
3197 * @opunused intel-modrm
3198 * @opcpuid sse
3199 * @optest ->
3200 * @opdone
3201 */
3202
3203/**
3204 * @opmnemonic udf20f17
3205 * @opcode 0x17
3206 * @oppfx 0xf2
3207 * @opunused intel-modrm
3208 * @opcpuid sse
3209 * @optest ->
3210 * @opdone
3211 */
3212
3213
3214/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    /* PREFETCHh m8 - decoded by the /reg field; memory forms only.
       All hints are currently implemented as NOPs (address is not touched). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* register form is #UD */
}
3245
3246
3247/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    /* Multi-byte NOP (hint-NOP range 0x0f 0x19..0x1f). The memory form still
       computes the effective address (for exception behaviour) but does not
       access memory. */
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3271
3272
3273/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* MOV r,CRn - read a control register into a general register.
       Defers the actual work to iemCImpl_mov_Rd_Cd. */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is fixed by CPU mode: 64-bit in long mode, 32-bit otherwise. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0/2/3/4/8 are architecturally valid; others raise #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
}
3304
3305
3306/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    /* MOV r,DRn - read a debug register; REX.R (which would select DR8-15) is #UD. */
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}
3319
3320
3321/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* MOV CRn,r - write a general register into a control register.
       Mirrors iemOp_mov_Rd_Cd decoding; defers to iemCImpl_mov_Cd_Rd. */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is fixed by CPU mode: 64-bit in long mode, 32-bit otherwise. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0/2/3/4/8 are architecturally valid; others raise #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
}
3352
3353
3354/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /* MOV DRn,r - write a debug register; REX.R (which would select DR8-15) is #UD. */
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}
3367
3368
3369/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* MOV r,TRn - legacy test-register read; only valid on pre-Pentium targets,
       otherwise #UD. Defers to iemCImpl_mov_Rd_Td. */
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}
3382
3383
3384/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* MOV TRn,r - legacy test-register write; only valid on pre-Pentium targets,
       otherwise #UD. Defers to iemCImpl_mov_Td_Rd. */
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}
3397
3398
3399/**
3400 * @opcode 0x28
3401 * @oppfx none
3402 * @opcpuid sse
3403 * @opgroup og_sse_simdfp_datamove
3404 * @opxcpttype 1
3405 * @optest op1=1 op2=2 -> op1=2
3406 * @optest op1=0 op2=-42 -> op1=-42
3407 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    /* Aligned 128-bit load/copy into an XMM register (SSE). */
    IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.  The aligned fetch raises #GP on a misaligned operand.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3447
3448/**
3449 * @opcode 0x28
3450 * @oppfx 66
3451 * @opcpuid sse2
3452 * @opgroup og_sse2_pcksclr_datamove
3453 * @opxcpttype 1
3454 * @optest op1=1 op2=2 -> op1=2
3455 * @optest op1=0 op2=-42 -> op1=-42
3456 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    /* Aligned 128-bit load/copy into an XMM register (SSE2, 66h prefix). */
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.  The aligned fetch raises #GP on a misaligned operand.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3496
3497/* Opcode 0xf3 0x0f 0x28 - invalid */
3498/* Opcode 0xf2 0x0f 0x28 - invalid */
3499
3500/**
3501 * @opcode 0x29
3502 * @oppfx none
3503 * @opcpuid sse
3504 * @opgroup og_sse_simdfp_datamove
3505 * @opxcpttype 1
3506 * @optest op1=1 op2=2 -> op1=2
3507 * @optest op1=0 op2=-42 -> op1=-42
3508 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    /* Aligned 128-bit store/copy from an XMM register (SSE, store direction). */
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.  Note: destination is R/M, source is REG.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.  The aligned store raises #GP on a misaligned operand.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3548
3549/**
3550 * @opcode 0x29
3551 * @oppfx 66
3552 * @opcpuid sse2
3553 * @opgroup og_sse2_pcksclr_datamove
3554 * @opxcpttype 1
3555 * @optest op1=1 op2=2 -> op1=2
3556 * @optest op1=0 op2=-42 -> op1=-42
3557 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    /* Aligned 128-bit store/copy from an XMM register (SSE2, store direction). */
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.  Note: destination is R/M, source is REG.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.  The aligned store raises #GP on a misaligned operand.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3597
3598/* Opcode 0xf3 0x0f 0x29 - invalid */
3599/* Opcode 0xf2 0x0f 0x29 - invalid */
3600
3601
3602/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
{
    /* Convert two packed int32 (MMX reg or mem64) to two packed single-precision
       floats in the low half of an XMM register; the high quadword is preserved,
       which is why the destination is fetched first in the register path. */
    IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, MMX
         */
        /* NOTE(review): gates on fSse2, but CVTPI2PS is listed as an SSE1
           instruction in the Intel SDM - verify whether fSse was intended. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* MMX source register operand causes MMX-mode transition. */

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64]
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_REF_MXCSR(pfMxcsr);

        /* NOTE(review): unlike the register path, Dst is not pre-loaded here, so
           the preserved high quadword comes from the zero-initialized local -
           TODO confirm against hardware / iemAImpl_cvtpi2ps_u128 semantics. */
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3671
3672
3673/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
{
    /* Convert two packed int32 (MMX reg or mem64) to two packed double-precision
       floats, writing the full 128-bit XMM destination. */
    IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, MMX
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* MMX source register operand causes MMX-mode transition. */

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64]
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        /* Doesn't cause a transition to MMX mode. */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3741
3742
3743/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
{
    /* Convert a signed 32/64-bit integer (GPR or memory) to a scalar single-
       precision float in the low dword of an XMM register.  REX.W selects the
       64-bit source form; four paths total (reg64/mem64/reg32/mem32). */
    IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG(const int64_t *, pi64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_LOCAL(int64_t, i64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG(const int32_t *, pi32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_LOCAL(int32_t, i32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
3866
3867
3868/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
{
    /* Convert a signed 32/64-bit integer (GPR or memory) to a scalar double-
       precision float in the low qword of an XMM register.  REX.W selects the
       64-bit source form; four paths total (reg64/mem64/reg32/mem32). */
    IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG(const int64_t *, pi64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_LOCAL(int64_t, i64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG(const int32_t *, pi32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_LOCAL(int32_t, i32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
3991
3992
3993/**
3994 * @opcode 0x2b
3995 * @opcodesub !11 mr/reg
3996 * @oppfx none
3997 * @opcpuid sse
3998 * @opgroup og_sse1_cachect
3999 * @opxcpttype 1
4000 * @optest op1=1 op2=2 -> op1=2
4001 * @optest op1=0 op2=-42 -> op1=-42
4002 */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
{
    /* Non-temporal aligned 128-bit store (SSE).  IEM performs a plain aligned
       store; the non-temporal hint has no emulation-visible effect.
       Memory form only - the register form raises #UD. */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
4031
4032/**
4033 * @opcode 0x2b
4034 * @opcodesub !11 mr/reg
4035 * @oppfx 0x66
4036 * @opcpuid sse2
4037 * @opgroup og_sse2_cachect
4038 * @opxcpttype 1
4039 * @optest op1=1 op2=2 -> op1=2
4040 * @optest op1=0 op2=-42 -> op1=-42
4041 */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    /* Non-temporal aligned 128-bit store (SSE2).  IEM performs a plain aligned
       store; the non-temporal hint has no emulation-visible effect.
       Memory form only - the register form raises #UD. */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
4070/* Opcode 0xf3 0x0f 0x2b - invalid */
4071/* Opcode 0xf2 0x0f 0x2b - invalid */
4072
4073
4074/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
{
    /* Convert-with-truncation two packed singles (low qword of XMM reg or mem64)
       to two packed int32 in an MMX register; transitions the FPU to MMX mode. */
    IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /* NOTE(review): gates on fSse2, but CVTTPS2PI is listed as an SSE1
           instruction in the Intel SDM - verify whether fSse was intended. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4140
4141
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
{
    /* Converts two packed double-precision values to two signed dwords in an
       MMX register, truncating towards zero (SSE2, hence the fSse2 gate). */
    IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        /* The destination is an MMX register, so the FPU enters MMX mode. */
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);
        /* Full 128-bit source reference - both doubles are converted. */
        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        /* Only commit the result when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* 128-bit memory operand with SSE alignment checking. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4210
4211
4212/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4213FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4214{
4215 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4216
4217 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4218 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4219 {
4220 if (IEM_IS_MODRM_REG_MODE(bRm))
4221 {
4222 /* greg64, XMM */
4223 IEM_MC_BEGIN(3, 2);
4224 IEM_MC_LOCAL(uint32_t, fMxcsr);
4225 IEM_MC_LOCAL(int64_t, i64Dst);
4226 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4227 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4228 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4229
4230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4231 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4232 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4233
4234 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4235 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4236 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4237 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4238 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4239 } IEM_MC_ELSE() {
4240 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4241 } IEM_MC_ENDIF();
4242
4243 IEM_MC_ADVANCE_RIP_AND_FINISH();
4244 IEM_MC_END();
4245 }
4246 else
4247 {
4248 /* greg64, [mem64] */
4249 IEM_MC_BEGIN(3, 4);
4250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4251 IEM_MC_LOCAL(uint32_t, fMxcsr);
4252 IEM_MC_LOCAL(int64_t, i64Dst);
4253 IEM_MC_LOCAL(uint32_t, u32Src);
4254 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4255 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4256 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4257
4258 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4260 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4261 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4262
4263 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4264 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4265 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4266 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4267 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4268 } IEM_MC_ELSE() {
4269 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4270 } IEM_MC_ENDIF();
4271
4272 IEM_MC_ADVANCE_RIP_AND_FINISH();
4273 IEM_MC_END();
4274 }
4275 }
4276 else
4277 {
4278 if (IEM_IS_MODRM_REG_MODE(bRm))
4279 {
4280 /* greg, XMM */
4281 IEM_MC_BEGIN(3, 2);
4282 IEM_MC_LOCAL(uint32_t, fMxcsr);
4283 IEM_MC_LOCAL(int32_t, i32Dst);
4284 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4285 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4286 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4287
4288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4289 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4290 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4291
4292 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4293 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4294 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4295 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4296 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4297 } IEM_MC_ELSE() {
4298 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4299 } IEM_MC_ENDIF();
4300
4301 IEM_MC_ADVANCE_RIP_AND_FINISH();
4302 IEM_MC_END();
4303 }
4304 else
4305 {
4306 /* greg, [mem] */
4307 IEM_MC_BEGIN(3, 4);
4308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4309 IEM_MC_LOCAL(uint32_t, fMxcsr);
4310 IEM_MC_LOCAL(int32_t, i32Dst);
4311 IEM_MC_LOCAL(uint32_t, u32Src);
4312 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4313 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4314 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4315
4316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4318 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4319 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4320
4321 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4322 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4323 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4324 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4325 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4326 } IEM_MC_ELSE() {
4327 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4328 } IEM_MC_ENDIF();
4329
4330 IEM_MC_ADVANCE_RIP_AND_FINISH();
4331 IEM_MC_END();
4332 }
4333 }
4334}
4335
4336
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
{
    /* Converts a scalar double-precision value (XMM/m64) to a signed 32- or
       64-bit general register, truncating towards zero (SSE2). */
    IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects the 64-bit destination forms. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Only commit the result when no unmasked SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg32, [mem32] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4460
4461
4462/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4463FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4464{
4465 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4467 if (IEM_IS_MODRM_REG_MODE(bRm))
4468 {
4469 /*
4470 * Register, register.
4471 */
4472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4473
4474 IEM_MC_BEGIN(3, 1);
4475 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4476 IEM_MC_LOCAL(uint64_t, u64Dst);
4477 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4478 IEM_MC_ARG(uint64_t, u64Src, 2);
4479
4480 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4481 IEM_MC_PREPARE_FPU_USAGE();
4482 IEM_MC_FPU_TO_MMX_MODE();
4483
4484 IEM_MC_REF_MXCSR(pfMxcsr);
4485 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4486
4487 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4488 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4489 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4490 } IEM_MC_ELSE() {
4491 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4492 } IEM_MC_ENDIF();
4493
4494 IEM_MC_ADVANCE_RIP_AND_FINISH();
4495 IEM_MC_END();
4496 }
4497 else
4498 {
4499 /*
4500 * Register, memory.
4501 */
4502 IEM_MC_BEGIN(3, 2);
4503 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4504 IEM_MC_LOCAL(uint64_t, u64Dst);
4505 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4506 IEM_MC_ARG(uint64_t, u64Src, 2);
4507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4508
4509 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4511 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4512 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4513
4514 IEM_MC_PREPARE_FPU_USAGE();
4515 IEM_MC_FPU_TO_MMX_MODE();
4516 IEM_MC_REF_MXCSR(pfMxcsr);
4517
4518 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4519 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4520 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4521 } IEM_MC_ELSE() {
4522 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4523 } IEM_MC_ENDIF();
4524
4525 IEM_MC_ADVANCE_RIP_AND_FINISH();
4526 IEM_MC_END();
4527 }
4528}
4529
4530
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
{
    /* Converts two packed double-precision values to two signed dwords in an
       MMX register, rounding per MXCSR.RC (SSE2, hence the fSse2 gate). */
    IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 2);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        /* The destination is an MMX register, so the FPU enters MMX mode. */
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);
        /* Full 128-bit source reference - both doubles are converted. */
        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        /* Only commit the result when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* 128-bit memory operand with SSE alignment checking. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4600
4601
4602/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4603FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4604{
4605 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4606
4607 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4608 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4609 {
4610 if (IEM_IS_MODRM_REG_MODE(bRm))
4611 {
4612 /* greg64, XMM */
4613 IEM_MC_BEGIN(3, 2);
4614 IEM_MC_LOCAL(uint32_t, fMxcsr);
4615 IEM_MC_LOCAL(int64_t, i64Dst);
4616 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4617 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4618 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4619
4620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4621 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4622 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4623
4624 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4625 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4626 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4627 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4628 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4629 } IEM_MC_ELSE() {
4630 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4631 } IEM_MC_ENDIF();
4632
4633 IEM_MC_ADVANCE_RIP_AND_FINISH();
4634 IEM_MC_END();
4635 }
4636 else
4637 {
4638 /* greg64, [mem64] */
4639 IEM_MC_BEGIN(3, 4);
4640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4641 IEM_MC_LOCAL(uint32_t, fMxcsr);
4642 IEM_MC_LOCAL(int64_t, i64Dst);
4643 IEM_MC_LOCAL(uint32_t, u32Src);
4644 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4645 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4646 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4647
4648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4650 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4651 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4652
4653 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4654 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4655 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4656 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4657 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4658 } IEM_MC_ELSE() {
4659 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4660 } IEM_MC_ENDIF();
4661
4662 IEM_MC_ADVANCE_RIP_AND_FINISH();
4663 IEM_MC_END();
4664 }
4665 }
4666 else
4667 {
4668 if (IEM_IS_MODRM_REG_MODE(bRm))
4669 {
4670 /* greg, XMM */
4671 IEM_MC_BEGIN(3, 2);
4672 IEM_MC_LOCAL(uint32_t, fMxcsr);
4673 IEM_MC_LOCAL(int32_t, i32Dst);
4674 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4675 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4676 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4677
4678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4679 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4680 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4681
4682 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4683 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4684 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4685 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4686 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4687 } IEM_MC_ELSE() {
4688 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4689 } IEM_MC_ENDIF();
4690
4691 IEM_MC_ADVANCE_RIP_AND_FINISH();
4692 IEM_MC_END();
4693 }
4694 else
4695 {
4696 /* greg, [mem] */
4697 IEM_MC_BEGIN(3, 4);
4698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4699 IEM_MC_LOCAL(uint32_t, fMxcsr);
4700 IEM_MC_LOCAL(int32_t, i32Dst);
4701 IEM_MC_LOCAL(uint32_t, u32Src);
4702 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4703 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4704 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4705
4706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4708 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4709 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4710
4711 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4712 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4713 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4714 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4715 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4716 } IEM_MC_ELSE() {
4717 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4718 } IEM_MC_ENDIF();
4719
4720 IEM_MC_ADVANCE_RIP_AND_FINISH();
4721 IEM_MC_END();
4722 }
4723 }
4724}
4725
4726
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
{
    /* Converts a scalar double-precision value (XMM/m64) to a signed 32- or
       64-bit general register, rounding per MXCSR.RC (SSE2). */
    IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects the 64-bit destination forms. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Only commit the result when no unmasked SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg32, [mem64] */
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4850
4851
/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
{
    /* Unordered scalar single-precision compare; sets ZF/PF/CF from the
       comparison result (worker writes EFLAGS through pEFlags). */
    IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(4, 1);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* EFLAGS are only committed when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Scalar operand: only the low dword is fetched from memory. */
        IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4919
4920
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
{
    /* Unordered scalar double-precision compare; sets ZF/PF/CF from the
       comparison result (worker writes EFLAGS through pEFlags). SSE2. */
    IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(4, 1);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* EFLAGS are only committed when no unmasked SIMD FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Scalar operand: only the low qword is fetched from memory. */
        IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4988
4989
4990/* Opcode 0xf3 0x0f 0x2e - invalid */
4991/* Opcode 0xf2 0x0f 0x2e - invalid */
4992
4993
/** Opcode 0x0f 0x2f - comiss Vss, Wss
 * Scalar single-precision compare (ordered variant per the mnemonic);
 * result delivered via EFLAGS.  EFLAGS is only committed when no MXCSR
 * exception is pending, otherwise a SIMD-FP or \#UD exception is raised. */
FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(4, 1);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Work on a copy of EFLAGS; committed below only if no #XF pends. */
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only the low dword of the memory operand is fetched. */
        IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5061
5062
/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd
 * Scalar double-precision compare (ordered variant per the mnemonic);
 * result delivered via EFLAGS.  EFLAGS is only committed when no MXCSR
 * exception is pending, otherwise a SIMD-FP or \#UD exception is raised. */
FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(4, 1);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Work on a copy of EFLAGS; committed below only if no #XF pends. */
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only the low qword of the memory operand is fetched. */
        IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5130
5131
5132/* Opcode 0xf3 0x0f 0x2f - invalid */
5133/* Opcode 0xf2 0x0f 0x2f - invalid */
5134
/** Opcode 0x0f 0x30 - wrmsr.  Entirely handled by the C implementation. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31 - rdtsc.  Entirely handled by the C implementation. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32 - rdmsr.  (Comment previously mislabelled this as 0x33.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}


/** Opcode 0x0f 0x33 - rdpmc.  (Comment previously mislabelled this as 0x34.) */
FNIEMOP_DEF(iemOp_rdpmc)
{
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
}


/** Opcode 0x0f 0x34 - sysenter. */
FNIEMOP_DEF(iemOp_sysenter)
{
    IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
}

/** Opcode 0x0f 0x35 - sysexit.  The effective operand size selects the
 *  returned-to mode, hence the extra argument. */
FNIEMOP_DEF(iemOp_sysexit)
{
    IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
}
5186
/** Opcode 0x0f 0x37 - getsec.  Not implemented yet. */
FNIEMOP_STUB(iemOp_getsec);


/** Opcode 0x0f 0x38 - escape into the three-byte 0F 38 opcode map.
 * The table has four entries per opcode byte, selected by the operand
 * prefix index (idxPrefix). */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/** Opcode 0x0f 0x3a - escape into the three-byte 0F 3A opcode map.
 * Same four-entries-per-opcode layout as the 0F 38 table. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
5215
5216
5217/**
5218 * Implements a conditional move.
5219 *
5220 * Wish there was an obvious way to do this where we could share and reduce
5221 * code bloat.
5222 *
5223 * @param a_Cnd The conditional "microcode" operation.
5224 */
5225#define CMOV_X(a_Cnd) \
5226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5227 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5228 { \
5229 switch (pVCpu->iem.s.enmEffOpSize) \
5230 { \
5231 case IEMMODE_16BIT: \
5232 IEM_MC_BEGIN(0, 1); \
5233 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5234 a_Cnd { \
5235 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5236 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5237 } IEM_MC_ENDIF(); \
5238 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5239 IEM_MC_END(); \
5240 break; \
5241 \
5242 case IEMMODE_32BIT: \
5243 IEM_MC_BEGIN(0, 1); \
5244 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5245 a_Cnd { \
5246 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5247 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5248 } IEM_MC_ELSE() { \
5249 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5250 } IEM_MC_ENDIF(); \
5251 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5252 IEM_MC_END(); \
5253 break; \
5254 \
5255 case IEMMODE_64BIT: \
5256 IEM_MC_BEGIN(0, 1); \
5257 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5258 a_Cnd { \
5259 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5260 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5261 } IEM_MC_ENDIF(); \
5262 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5263 IEM_MC_END(); \
5264 break; \
5265 \
5266 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5267 } \
5268 } \
5269 else \
5270 { \
5271 switch (pVCpu->iem.s.enmEffOpSize) \
5272 { \
5273 case IEMMODE_16BIT: \
5274 IEM_MC_BEGIN(0, 2); \
5275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5276 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5277 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5278 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5279 a_Cnd { \
5280 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5281 } IEM_MC_ENDIF(); \
5282 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5283 IEM_MC_END(); \
5284 break; \
5285 \
5286 case IEMMODE_32BIT: \
5287 IEM_MC_BEGIN(0, 2); \
5288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5289 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5291 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5292 a_Cnd { \
5293 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5294 } IEM_MC_ELSE() { \
5295 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5296 } IEM_MC_ENDIF(); \
5297 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5298 IEM_MC_END(); \
5299 break; \
5300 \
5301 case IEMMODE_64BIT: \
5302 IEM_MC_BEGIN(0, 2); \
5303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5304 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5306 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5307 a_Cnd { \
5308 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5309 } IEM_MC_ENDIF(); \
5310 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5311 IEM_MC_END(); \
5312 break; \
5313 \
5314 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5315 } \
5316 } do {} while (0)
5317
5318
5319
/** Opcode 0x0f 0x40 - cmovo Gv,Ev (move if OF set). */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - cmovno Gv,Ev (move if OF clear). */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - cmovc Gv,Ev (move if CF set). */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - cmovnc Gv,Ev (move if CF clear). */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - cmove Gv,Ev (move if ZF set). */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - cmovne Gv,Ev (move if ZF clear). */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - cmovbe Gv,Ev (move if CF or ZF set). */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - cmovnbe Gv,Ev (move if both CF and ZF clear). */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48 - cmovs Gv,Ev (move if SF set). */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - cmovns Gv,Ev (move if SF clear). */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - cmovp Gv,Ev (move if PF set). */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - cmovnp Gv,Ev (move if PF clear). */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - cmovl Gv,Ev (move if SF != OF). */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - cmovnl Gv,Ev (move if SF == OF). */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - cmovle Gv,Ev (move if ZF set or SF != OF). */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - cmovnle Gv,Ev (move if ZF clear and SF == OF). */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
5448
5449/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5450FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5451{
5452 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5454 if (IEM_IS_MODRM_REG_MODE(bRm))
5455 {
5456 /*
5457 * Register, register.
5458 */
5459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5460 IEM_MC_BEGIN(2, 1);
5461 IEM_MC_LOCAL(uint8_t, u8Dst);
5462 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5463 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5464 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5465 IEM_MC_PREPARE_SSE_USAGE();
5466 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5467 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5468 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5469 IEM_MC_ADVANCE_RIP_AND_FINISH();
5470 IEM_MC_END();
5471 }
5472 /* No memory operand. */
5473 else
5474 return IEMOP_RAISE_INVALID_OPCODE();
5475}
5476
5477
5478/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5479FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5480{
5481 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5482 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5483 if (IEM_IS_MODRM_REG_MODE(bRm))
5484 {
5485 /*
5486 * Register, register.
5487 */
5488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5489 IEM_MC_BEGIN(2, 1);
5490 IEM_MC_LOCAL(uint8_t, u8Dst);
5491 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5492 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5493 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5494 IEM_MC_PREPARE_SSE_USAGE();
5495 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5496 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5497 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG_8(bRm), u8Dst);
5498 IEM_MC_ADVANCE_RIP_AND_FINISH();
5499 IEM_MC_END();
5500 }
5501 /* No memory operand. */
5502 else
5503 return IEMOP_RAISE_INVALID_OPCODE();
5504
5505}
5506
5507
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */


/*
 * 0x0f 0x51..0x53: square root and reciprocal estimate instructions.  These
 * are thin decoders dispatching to the common SSE/SSE2 full/scalar workers;
 * rcpps and rcpss are still unimplemented stubs.
 */

/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
}


/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
}


/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
}


/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
}


/* Opcode 0x66 0x0f 0x52 - invalid */


/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
}


/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */
5571
5572
/*
 * 0x0f 0x54..0x57: packed logical instructions.  The FP and integer logical
 * operations are bitwise identical, so these all reuse the pand/pandn/por/
 * pxor 128-bit workers; only the CPUID feature check (SSE vs SSE2) differs.
 */

/** Opcode 0x0f 0x54 - andps Vps, Wps */
FNIEMOP_DEF(iemOp_andps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
}


/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}


/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */


/** Opcode 0x0f 0x55 - andnps Vps, Wps */
FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
}


/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}


/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */


/** Opcode 0x0f 0x56 - orps Vps, Wps */
FNIEMOP_DEF(iemOp_orps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
}


/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */


/** Opcode 0x0f 0x57 - xorps Vps, Wps */
FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
}


/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}


/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */
5651
/*
 * 0x0f 0x58..0x59: packed/scalar FP add and multiply.  Thin decoders
 * dispatching to the common SSE/SSE2 full or scalar (R32/R64) workers.
 */

/** Opcode 0x0f 0x58 - addps Vps, Wps */
FNIEMOP_DEF(iemOp_addps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
}


/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
}


/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
FNIEMOP_DEF(iemOp_addss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
}


/** Opcode 0x0f 0x59 - mulps Vps, Wps */
FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
}


/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
}


/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
}
5714
5715
/*
 * 0x0f 0x5a..0x5b: floating-point format conversions.  Thin decoders
 * dispatching to the common SSE/SSE2 full or scalar (R32/R64) workers.
 */

/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
}


/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
}


/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
{
    IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
}


/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
}


/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
}


/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps (truncating conversion) */
FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
}


/* Opcode 0xf2 0x0f 0x5b - invalid */
5773
5774
/*
 * 0x0f 0x5c..0x5d: packed/scalar FP subtract and minimum.  Thin decoders
 * dispatching to the common SSE/SSE2 full or scalar (R32/R64) workers.
 */

/** Opcode 0x0f 0x5c - subps Vps, Wps */
FNIEMOP_DEF(iemOp_subps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
}


/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
}


/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
FNIEMOP_DEF(iemOp_subss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
}


/** Opcode 0x0f 0x5d - minps Vps, Wps */
FNIEMOP_DEF(iemOp_minps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
}


/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
}


/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
FNIEMOP_DEF(iemOp_minss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
}
5837
5838
/*
 * 0x0f 0x5e..0x5f: packed/scalar FP divide and maximum.  Thin decoders
 * dispatching to the common SSE/SSE2 full or scalar (R32/R64) workers.
 */

/** Opcode 0x0f 0x5e - divps Vps, Wps */
FNIEMOP_DEF(iemOp_divps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
}


/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
}


/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
FNIEMOP_DEF(iemOp_divss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
}


/** Opcode 0x0f 0x5f - maxps Vps, Wps */
FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
}


/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
}


/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
}
5901
5902
/*
 * 0x0f 0x60..0x63: low unpack and pack instructions.  MMX forms use the
 * 64-bit workers, the 0x66-prefixed SSE2 forms the 128-bit workers.
 */

/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
}


/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
}


/* Opcode 0xf3 0x0f 0x60 - invalid */


/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
}


/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
}


/* Opcode 0xf3 0x0f 0x61 - invalid */


/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
}


/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
}


/* Opcode 0xf3 0x0f 0x62 - invalid */



/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
}


/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
}


/* Opcode 0xf3 0x0f 0x63 - invalid */
5979
5980
/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq
 * @note NOTE(review): the pcmpgt* entries pass plain DISOPTYPE_HARMLESS and
 *       IEMOPHINT_IGNORES_OP_SIZES, whereas the neighbouring punpck*/pack*
 *       entries add DISOPTYPE_X86_MMX / DISOPTYPE_X86_SSE and pass 0 for the
 *       hints — confirm whether this difference is intentional. */
FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX: byte-wise signed greater-than compare, result is all-ones/all-zeros per byte. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
}


/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE2 form of the above. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
}


/* Opcode 0xf3 0x0f 0x64 - invalid */


/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX: word-wise signed greater-than compare. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
}


/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE2 form of the above. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
}


/* Opcode 0xf3 0x0f 0x65 - invalid */


/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX: dword-wise signed greater-than compare. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
}


/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE2 form of the above. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
}
6033
6034
6035/* Opcode 0xf3 0x0f 0x66 - invalid */
6036
6037
/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX: pack signed words to unsigned-saturated bytes. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
}


/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 form, full 128-bit operands. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
}
6052
6053
6054/* Opcode 0xf3 0x0f 0x67 - invalid */
6055
6056
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX: interleave the high bytes of the two operands. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}


/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 form of the above, common high,high -> full worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
}
6074
6075
6076/* Opcode 0xf3 0x0f 0x68 - invalid */
6077
6078
/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX: interleave the high words of the two operands. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
}


/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx
 * @note The old comment said "Vx, Hx, Wx", but this is the legacy SSE form
 *       with only two operands (Hx is a VEX-encoding concept). */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 form of the above. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
}
6097
6098
6099/* Opcode 0xf3 0x0f 0x69 - invalid */
6100
6101
/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX: interleave the high dwords of the two operands. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
}


/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 form of the above. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
}
6119
6120
6121/* Opcode 0xf3 0x0f 0x6a - invalid */
6122
6123
/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX: pack signed dwords to signed-saturated words. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
}


/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 form, full 128-bit operands. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
}
6138
6139
6140/* Opcode 0xf3 0x0f 0x6b - invalid */
6141
6142
6143/* Opcode 0x0f 0x6c - invalid */
6144
6145
/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 only (no MMX form): interleave the low qwords of xmm1 and xmm2/mem128. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
}
6152
6153
6154/* Opcode 0xf3 0x0f 0x6c - invalid */
6155/* Opcode 0xf2 0x0f 0x6c - invalid */
6156
6157
6158/* Opcode 0x0f 0x6d - invalid */
6159
6160
/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 only (no MMX form): interleave the high qwords of xmm1 and xmm2/mem128. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
}
6167
6168
6169/* Opcode 0xf3 0x0f 0x6d - invalid */
6170
6171
/** Opcode 0x0f 0x6e - movd/movq Pd/Pq, Ed/Eq.
 * Loads a general register or memory operand into an MMX register; REX.W
 * selects between the 32-bit (zero-extended) and 64-bit forms. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            /* Raise #UD/#NM as appropriate, then switch the FPU unit into MMX mode. */
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            /* The effective address must be decoded before the done-decoding check. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg32 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            /* 32-bit source is zero-extended into the 64-bit MMX register. */
            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
6278
/** Opcode 0x66 0x0f 0x6e - movd/movq Vd/Vq, Ed/Eq.
 * Loads a general register or memory operand into the low part of an XMM
 * register, zero-extending to 128 bits; REX.W selects 32- vs 64-bit source. */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            /* Low qword gets the source, high qword is zeroed. */
            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            /* Effective address is decoded before the done-decoding check. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            /* 32-bit source zero-extended into the full 128-bit register. */
            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
6381
6382/* Opcode 0xf3 0x0f 0x6e - invalid */
6383
6384
/**
 * @opcode      0x6f
 * @oppfx       none
 * @opcpuid     mmx
 * @opgroup     og_mmx_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movq_Pq_Qq)
{
    /* NOTE(review): the mnemonic below says MOVD although opcode 0x0f 0x6f is
       MOVQ (and the function is named movq) — looks like a copy/paste from the
       0x6e handler.  Also, unlike the neighbouring MMX entries, no
       DISOPTYPE_X86_MMX flag is passed.  Changing the mnemonic affects the
       generated statistics symbols, so confirm the table entries exist before
       fixing. */
    IEMOP_MNEMONIC2(RM, MOVD, movd, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);

        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6439
/**
 * @opcode      0x6f
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* movdqa: the _ALIGN_SSE fetch enforces the 16-byte alignment requirement (#GP on misalignment). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6490
/**
 * @opcode      0x6f
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* movdqu: plain (unaligned) fetch, unlike movdqa above. */
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6538
6539
/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        /* pshufw requires SSE or the AMD MMX extensions. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The '1' tells the address decoder one immediate byte follows the ModRM operand. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6598
6599
/**
 * Common worker for SSE2 instructions on the forms:
 *     pshufd  xmm1, xmm2/mem128, imm8
 *     pshufhw xmm1, xmm2/mem128, imm8
 *     pshuflw xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @param   pfnWorker   The u128 shuffle implementation to invoke with
 *                      (dst, src, imm8) after decoding and the SSE checks.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* One immediate byte follows the ModRM operand, hence the '1'. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();

        /* Aligned fetch: misaligned mem128 operands raise #GP (exception type 4). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6658
6659
/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Shuffle dwords per imm8; shared worker does decode/checks. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
}


/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Shuffle the high four words per imm8, low qword copied unchanged. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
}


/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Shuffle the low four words per imm8, high qword copied unchanged. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
}
6682
6683
/**
 * Common worker for MMX instructions of the form:
 *     psrlw mm, imm8
 *     psraw mm, imm8
 *     psllw mm, imm8
 *     psrld mm, imm8
 *     psrad mm, imm8
 *     pslld mm, imm8
 *     psrlq mm, imm8
 *     psllq mm, imm8
 *
 * Only reached via the group 12/13/14 register-form dispatch tables, so the
 * mod=reg case is the only one that can legitimately occur here.
 *
 * @param   bRm     The ModRM byte (already fetched by the group decoder).
 * @param   pfnU64  The u64 shift implementation to invoke with (dst, imm8).
 */
FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);

        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        /* Note: the shifted register is the ModRM r/m field (dst == src). */
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory not supported.
         */
        /// @todo Caller already enforced register mode?!
        AssertFailedReturn(VINF_SUCCESS);
    }
}
6729
6730
/**
 * Common worker for SSE2 instructions of the form:
 *     psrlw xmm, imm8
 *     psraw xmm, imm8
 *     psllw xmm, imm8
 *     psrld xmm, imm8
 *     psrad xmm, imm8
 *     pslld xmm, imm8
 *     psrlq xmm, imm8
 *     psllq xmm, imm8
 *
 * Only reached via the group 12/13/14 register-form dispatch tables, so the
 * mod=reg case is the only one that can legitimately occur here.
 *
 * @param   bRm     The ModRM byte (already fetched by the group decoder).
 * @param   pfnU128 The u128 shift implementation to invoke with (dst, imm8).
 */
FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Note: the shifted register is the ModRM r/m field (dst == src). */
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        /// @todo Caller already enforced register mode?!
        AssertFailedReturn(VINF_SUCCESS);
    }
}
6772
6773
/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX logical right shift of words by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 logical right shift of words by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
}


/** Opcode 0x0f 0x71 11/4. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX arithmetic right shift of words by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 arithmetic right shift of words by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
}


/** Opcode 0x0f 0x71 11/6. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX left shift of words by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 left shift of words by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
}
6820
6821
/**
 * Group 12 jump table for register variant.
 *
 * Indexed by ModRM.reg * 4 + idxPrefix, i.e. four prefix columns per /reg row
 * (presumably none, 0x66, 0xf3, 0xf2 — matches the dispatch in iemOp_Grp12).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6837
6838
/** Opcode 0x0f 0x71 - group 12 (packed word shifts by immediate). */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register: dispatch on /reg and the active SIMD prefix. */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory forms of group 12 are all invalid (still need to eat the imm8). */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6849
6850
/** Opcode 0x0f 0x72 11/2. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX logical right shift of dwords by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 logical right shift of dwords by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
}


/** Opcode 0x0f 0x72 11/4. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX arithmetic right shift of dwords by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 arithmetic right shift of dwords by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
}


/** Opcode 0x0f 0x72 11/6. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX left shift of dwords by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 left shift of dwords by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
}
6896
6897
/**
 * Group 13 jump table for register variant.
 *
 * Indexed by ModRM.reg * 4 + idxPrefix, same layout as g_apfnGroup12RegReg.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6913
/** Opcode 0x0f 0x72 - group 13 (packed dword shifts by immediate). */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register: dispatch on /reg and the active SIMD prefix. */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[  IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory forms of group 13 are all invalid (still need to eat the imm8). */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
6924
6925
/** Opcode 0x0f 0x73 11/2. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX logical right shift of the qword by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 logical right shift of qwords by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
}


/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 only: byte-wise right shift of the whole 128-bit register. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
}


/** Opcode 0x0f 0x73 11/6. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* MMX left shift of the qword by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 left shift of qwords by immediate. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
}


/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* SSE2 only: byte-wise left shift of the whole 128-bit register. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
}
6972
/**
 * Group 14 jump table for register variant.
 *
 * Indexed by ModRM.reg * 4 + idxPrefix, same layout as g_apfnGroup12RegReg.
 * Note /3 and /7 (psrldq/pslldq) only exist with the 0x66 prefix.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6988
6989
/** Opcode 0x0f 0x73. */
FNIEMOP_DEF(iemOp_Grp14)
{
    /* Group 14 dispatcher: register forms are selected from the table above by
       ModR/M.reg and the active SIMD prefix; all memory forms are invalid (but
       must still decode the trailing immediate byte). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
7000
7001
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Byte-wise compare-for-equality on MMX registers; ModR/M decoding and
       exception checks are done by the common full-to-full MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}
7008
7009
/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE2 byte-wise compare-for-equality on XMM registers via the common
       full-to-full SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}
7016
7017
7018/* Opcode 0xf3 0x0f 0x74 - invalid */
7019/* Opcode 0xf2 0x0f 0x74 - invalid */
7020
7021
/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Word-wise compare-for-equality on MMX registers via the common
       full-to-full MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}
7028
7029
/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE2 word-wise compare-for-equality on XMM registers via the common
       full-to-full SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
}
7036
7037
7038/* Opcode 0xf3 0x0f 0x75 - invalid */
7039/* Opcode 0xf2 0x0f 0x75 - invalid */
7040
7041
/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Dword-wise compare-for-equality on MMX registers via the common
       full-to-full MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
}
7048
7049
/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE2 dword-wise compare-for-equality on XMM registers via the common
       full-to-full SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
}
7056
7057
7058/* Opcode 0xf3 0x0f 0x76 - invalid */
7059/* Opcode 0xf2 0x0f 0x76 - invalid */
7060
7061
/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_DEF(iemOp_emms)
{
    IEMOP_MNEMONIC(emms, "emms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    /* May raise #NM (device-not-available) and #MF (pending x87 exception)
       before touching the FPU state. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Leave MMX mode, i.e. return the x87 state to its empty/normal FPU
       configuration (tag handling done inside the MC). */
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7076
7077/* Opcode 0x66 0x0f 0x77 - invalid */
7078/* Opcode 0xf3 0x0f 0x77 - invalid */
7079/* Opcode 0xf2 0x0f 0x77 - invalid */
7080
/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    /* Operand size is 64-bit in 64-bit mode and 32-bit everywhere else; there
       is no 16-bit form. */
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            /* reg = VMCS field encoding, r/m = destination GPR. */
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu64Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            /* Effective address must be calculated before the done-decoding
               call (it may fetch displacement bytes). */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
#endif
7154
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17); /* still a decoder stub (FNIEMOP_STUB) */
7157/* Opcode 0xf3 0x0f 0x78 - invalid */
7158/* Opcode 0xf2 0x0f 0x78 - invalid */
7159
/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    /* Operand size is 64-bit in 64-bit mode and 32-bit everywhere else; there
       is no 16-bit form. */
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            /* r/m = value to write, reg = VMCS field encoding. */
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            /* Effective address must be calculated before the done-decoding
               call (it may fetch displacement bytes). */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
#endif
7233/* Opcode 0x66 0x0f 0x79 - invalid */
7234/* Opcode 0xf3 0x0f 0x79 - invalid */
7235/* Opcode 0xf2 0x0f 0x79 - invalid */
7236
7237/* Opcode 0x0f 0x7a - invalid */
7238/* Opcode 0x66 0x0f 0x7a - invalid */
7239/* Opcode 0xf3 0x0f 0x7a - invalid */
7240/* Opcode 0xf2 0x0f 0x7a - invalid */
7241
7242/* Opcode 0x0f 0x7b - invalid */
7243/* Opcode 0x66 0x0f 0x7b - invalid */
7244/* Opcode 0xf3 0x0f 0x7b - invalid */
7245/* Opcode 0xf2 0x0f 0x7b - invalid */
7246
7247/* Opcode 0x0f 0x7c - invalid */
7248
7249
/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* SSE3 horizontal add of packed doubles via the common SSE3 FP worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
}
7256
7257
7258/* Opcode 0xf3 0x0f 0x7c - invalid */
7259
7260
/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* SSE3 horizontal add of packed singles via the common SSE3 FP worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}
7267
7268
7269/* Opcode 0x0f 0x7d - invalid */
7270
7271
/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* SSE3 horizontal subtract of packed doubles via the common SSE3 FP worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
}
7278
7279
7280/* Opcode 0xf3 0x0f 0x7d - invalid */
7281
7282
/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* SSE3 horizontal subtract of packed singles via the common SSE3 FP worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
}
7289
7290
/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects the 64-bit MOVQ form; without it this is the 32-bit MOVD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            /* Accessing an MMX register puts the FPU into MMX mode. */
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
7398
7399
/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects the 64-bit MOVQ form; without it this is the 32-bit MOVD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            /* Low qword of the XMM register goes to the general register. */
            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            /* Low dword of the XMM register goes to the general register. */
            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
7502
/**
 * @opcode      0x7e
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  none
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Vq_Wq)
{
    IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM64.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Copy the low qword and zero-extend into the full 128-bit register. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem64].
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Load the qword from memory and zero-extend into the register. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7555
7556/* Opcode 0xf2 0x0f 0x7e - invalid */
7557
7558
/** Opcode 0x0f 0x7f - movq Qq, Pq */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        /* Accessing an MMX register puts the FPU into MMX mode. */
        IEM_MC_FPU_TO_MMX_MODE();

        /* Note the MR direction: reg is the source, r/m the destination. */
        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], MMX.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7606
/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Register-to-register copy; MR direction, so reg -> r/m. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* The aligned-store MC raises #GP on a misaligned address (movdqa). */
        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7647
/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Register-to-register copy; MR direction, so reg -> r/m. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        /* Unaligned store (movdqu) - no alignment check, unlike movdqa. */
        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7688
7689/* Opcode 0xf2 0x0f 0x7f - invalid */
7690
7691
7692
/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    /* Jcc rel16/rel32, taken when OF is set; 386+.  The immediate width
       follows the effective operand size. */
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7726
7727
/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    /* Jcc rel16/rel32, taken when OF is clear; 386+.  The condition is tested
       positively with the branches swapped. */
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7761
7762
/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    /* Jcc rel16/rel32, taken when CF is set; 386+. */
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7796
7797
/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    /* Jcc rel16/rel32, taken when CF is clear; 386+.  The condition is tested
       positively with the branches swapped. */
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7831
7832
/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    /* Jcc rel16/rel32, taken when ZF is set; 386+. */
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7866
7867
/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    /* Jcc rel16/rel32, taken when ZF is clear; 386+.  The condition is tested
       positively with the branches swapped. */
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7901
7902
/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    /* Jcc rel16/rel32, taken when CF or ZF is set (below-or-equal); 386+. */
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7936
7937
/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    /* Jcc rel16/rel32, taken when both CF and ZF are clear (above); 386+.
       The condition is tested positively with the branches swapped. */
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7971
7972
/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    /* Jcc rel16/rel32, taken when SF is set; 386+. */
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8006
8007
/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    /* Jcc rel16/rel32, taken when SF is clear; 386+.  The condition is tested
       positively with the branches swapped. */
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8041
8042
/** Opcode 0x0f 0x8a.
 *
 * jp Jv - jump near, signed 16/32-bit relative displacement, taken when the
 * parity flag (PF) is set. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8076
8077
/** Opcode 0x0f 0x8b.
 *
 * jnp Jv - jump near, signed 16/32-bit relative displacement, taken when the
 * parity flag (PF) is clear (inverted branch sense: ELSE arm jumps). */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8111
8112
/** Opcode 0x0f 0x8c.
 *
 * jl/jnge Jv - jump near, signed 16/32-bit relative displacement, taken when
 * SF != OF (signed less-than). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8146
8147
/** Opcode 0x0f 0x8d.
 *
 * jnl/jge Jv - jump near, signed 16/32-bit relative displacement, taken when
 * SF == OF (signed greater-or-equal; inverted branch sense: ELSE arm jumps). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8181
8182
/** Opcode 0x0f 0x8e.
 *
 * jle/jng Jv - jump near, signed 16/32-bit relative displacement, taken when
 * ZF is set or SF != OF (signed less-or-equal). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8216
8217
/** Opcode 0x0f 0x8f.
 *
 * jnle/jg Jv - jump near, signed 16/32-bit relative displacement, taken when
 * ZF is clear and SF == OF (signed greater-than; inverted branch sense:
 * ELSE arm jumps). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8251
8252
/** Opcode 0x0f 0x90.
 *
 * seto Eb - store 1 in the byte register/memory operand when the overflow
 * flag (OF) is set, otherwise store 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8292
8293
/** Opcode 0x0f 0x91.
 *
 * setno Eb - store 1 in the byte register/memory operand when the overflow
 * flag (OF) is clear, otherwise store 0 (note the inverted 0/1 constants). */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8333
8334
/** Opcode 0x0f 0x92.
 *
 * setc Eb - store 1 in the byte register/memory operand when the carry
 * flag (CF) is set, otherwise store 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8374
8375
/** Opcode 0x0f 0x93.
 *
 * setnc Eb - store 1 in the byte register/memory operand when the carry
 * flag (CF) is clear, otherwise store 0 (inverted 0/1 constants). */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8415
8416
/** Opcode 0x0f 0x94.
 *
 * sete Eb - store 1 in the byte register/memory operand when the zero
 * flag (ZF) is set, otherwise store 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8456
8457
/** Opcode 0x0f 0x95.
 *
 * setne Eb - store 1 in the byte register/memory operand when the zero
 * flag (ZF) is clear, otherwise store 0 (inverted 0/1 constants). */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8497
8498
/** Opcode 0x0f 0x96.
 *
 * setbe Eb - store 1 in the byte register/memory operand when CF or ZF is
 * set (unsigned below-or-equal), otherwise store 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8538
8539
/** Opcode 0x0f 0x97.
 *
 * setnbe Eb - store 1 in the byte register/memory operand when both CF and
 * ZF are clear (unsigned above), otherwise store 0 (inverted 0/1 constants). */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8579
8580
/** Opcode 0x0f 0x98.
 *
 * sets Eb - store 1 in the byte register/memory operand when the sign
 * flag (SF) is set, otherwise store 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8620
8621
/** Opcode 0x0f 0x99.
 *
 * setns Eb - store 1 in the byte register/memory operand when the sign
 * flag (SF) is clear, otherwise store 0 (inverted 0/1 constants). */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8661
8662
/** Opcode 0x0f 0x9a.
 *
 * setp Eb - store 1 in the byte register/memory operand when the parity
 * flag (PF) is set, otherwise store 0. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8702
8703
/** Opcode 0x0f 0x9b.
 *
 * setnp Eb - store 1 in the byte register/memory operand when the parity
 * flag (PF) is clear, otherwise store 0 (inverted 0/1 constants). */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8743
8744
/** Opcode 0x0f 0x9c.
 *
 * setl Eb - store 1 in the byte register/memory operand when SF != OF
 * (signed less-than), otherwise store 0. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8784
8785
/** Opcode 0x0f 0x9d.
 *
 * setnl Eb - store 1 in the byte register/memory operand when SF == OF
 * (signed greater-or-equal), otherwise store 0 (inverted 0/1 constants). */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8825
8826
/** Opcode 0x0f 0x9e.
 *
 * setle Eb - store 1 in the byte register/memory operand when ZF is set or
 * SF != OF (signed less-or-equal), otherwise store 0. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8866
8867
/** Opcode 0x0f 0x9f.
 *
 * setnle Eb - store 1 in the byte register/memory operand when ZF is clear
 * and SF == OF (signed greater-than), otherwise store 0 (inverted 0/1
 * constants). */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way.  AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8907
8908
/**
 * Common 'push segment-register' helper.
 *
 * Pushes the given segment register onto the stack using the current
 * effective operand size: 16-bit pushes the raw selector, 32/64-bit pushes
 * the selector zero-extended (the 32-bit variant goes through
 * IEM_MC_PUSH_U32_SREG, which has special sreg-push semantics).
 *
 * @param   iReg    The segment register to push (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): the disjunct direction here reads as "FS/GS implies
       non-64-bit mode", yet the 0x0f 0xa0/0xa8 callers pass FS/GS which are
       valid in 64-bit mode - confirm the intended condition. */
    Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8950
8951
/** Opcode 0x0f 0xa0.
 *
 * push fs - pushes the FS selector using the common sreg-push helper.
 * NOTE(review): IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX is invoked both here
 * and at the top of iemOpCommonPushSReg; looks redundant but harmless -
 * confirm. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
8960
8961
/** Opcode 0x0f 0xa1.
 *
 * pop fs - deferred to the iemCImpl_pop_Sreg C implementation with the
 * current effective operand size. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
8970
8971
/** Opcode 0x0f 0xa2.
 *
 * cpuid - deferred to the iemCImpl_cpuid C implementation. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
8980
8981
8982/**
8983 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8984 * iemOp_bts_Ev_Gv.
8985 */
8986#define IEMOP_BODY_BIT_Ev_Gv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
8987 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8988 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8989 \
8990 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8991 { \
8992 /* register destination. */ \
8993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8994 switch (pVCpu->iem.s.enmEffOpSize) \
8995 { \
8996 case IEMMODE_16BIT: \
8997 IEM_MC_BEGIN(3, 0); \
8998 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8999 IEM_MC_ARG(uint16_t, u16Src, 1); \
9000 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9001 \
9002 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9003 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9004 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9005 IEM_MC_REF_EFLAGS(pEFlags); \
9006 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9007 \
9008 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9009 IEM_MC_END(); \
9010 break; \
9011 \
9012 case IEMMODE_32BIT: \
9013 IEM_MC_BEGIN(3, 0); \
9014 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9015 IEM_MC_ARG(uint32_t, u32Src, 1); \
9016 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9017 \
9018 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9019 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9020 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9021 IEM_MC_REF_EFLAGS(pEFlags); \
9022 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9023 \
9024 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
9025 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9026 IEM_MC_END(); \
9027 break; \
9028 \
9029 case IEMMODE_64BIT: \
9030 IEM_MC_BEGIN(3, 0); \
9031 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9032 IEM_MC_ARG(uint64_t, u64Src, 1); \
9033 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9034 \
9035 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9036 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9037 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9038 IEM_MC_REF_EFLAGS(pEFlags); \
9039 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9040 \
9041 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9042 IEM_MC_END(); \
9043 break; \
9044 \
9045 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9046 } \
9047 } \
9048 else \
9049 { \
9050 /* memory destination. */ \
9051 /** @todo test negative bit offsets! */ \
9052 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9053 { \
9054 switch (pVCpu->iem.s.enmEffOpSize) \
9055 { \
9056 case IEMMODE_16BIT: \
9057 IEM_MC_BEGIN(3, 2); \
9058 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9059 IEM_MC_ARG(uint16_t, u16Src, 1); \
9060 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9062 IEM_MC_LOCAL(int16_t, i16AddrAdj); \
9063 \
9064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9065 IEMOP_HLP_DONE_DECODING(); \
9066 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9067 IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
9068 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9069 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9070 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9071 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9072 IEM_MC_FETCH_EFLAGS(EFlags); \
9073 \
9074 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9075 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9076 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
9077 \
9078 IEM_MC_COMMIT_EFLAGS(EFlags); \
9079 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9080 IEM_MC_END(); \
9081 break; \
9082 \
9083 case IEMMODE_32BIT: \
9084 IEM_MC_BEGIN(3, 2); \
9085 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9086 IEM_MC_ARG(uint32_t, u32Src, 1); \
9087 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9089 IEM_MC_LOCAL(int32_t, i32AddrAdj); \
9090 \
9091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9092 IEMOP_HLP_DONE_DECODING(); \
9093 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9094 IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
9095 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9096 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9097 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9098 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9099 IEM_MC_FETCH_EFLAGS(EFlags); \
9100 \
9101 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9102 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9103 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
9104 \
9105 IEM_MC_COMMIT_EFLAGS(EFlags); \
9106 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9107 IEM_MC_END(); \
9108 break; \
9109 \
9110 case IEMMODE_64BIT: \
9111 IEM_MC_BEGIN(3, 2); \
9112 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9113 IEM_MC_ARG(uint64_t, u64Src, 1); \
9114 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9116 IEM_MC_LOCAL(int64_t, i64AddrAdj); \
9117 \
9118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9119 IEMOP_HLP_DONE_DECODING(); \
9120 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9121 IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
9122 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9123 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9124 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9125 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9126 IEM_MC_FETCH_EFLAGS(EFlags); \
9127 \
9128 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
9129 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9130 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
9131 \
9132 IEM_MC_COMMIT_EFLAGS(EFlags); \
9133 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9134 IEM_MC_END(); \
9135 break; \
9136 \
9137 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9138 } \
9139 } \
9140 else \
9141 { \
9142 (void)0
9143
/**
 * Tail macro pairing with IEMOP_BODY_BIT_Ev_Gv for bit instructions that do
 * NOT allow a LOCK prefix (BT).  Control only reaches this expansion when a
 * LOCK prefix was present, so it finishes decoding and raises the
 * invalid-lock-prefix exception.  The two closing braces terminate the blocks
 * left open by IEMOP_BODY_BIT_Ev_Gv (the else-branch for LOCK and the
 * memory-destination branch).
 */
#define IEMOP_BODY_BIT_Ev_Gv_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } \
    (void)0
9150
/**
 * Tail macro pairing with IEMOP_BODY_BIT_Ev_Gv for bit instructions that DO
 * allow a LOCK prefix (BTS/BTR/BTC).  This expands the memory-destination
 * path for the locked case, invoking the locked (interlocked) worker
 * functions instead of the normal ones, and closes the two braces left open
 * by IEMOP_BODY_BIT_Ev_Gv.
 *
 * Bit addressing: the full (signed) bit offset from the register operand is
 * split into an element index (arithmetic shift right by 4/5/6) scaled to a
 * byte displacement (shift left by 1/2/3) that adjusts the effective
 * address, while the masked low bits (0x0f/0x1f/0x3f) select the bit within
 * the addressed element.  This supports negative bit offsets.
 */
#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int16_t, i16AddrAdj); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int32_t, i32AddrAdj); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int64_t, i64AddrAdj); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
9240
9241
/** Opcode 0x0f 0xa3 - bt Ev,Gv.
 * BT only tests a bit, so the destination is mapped read-only
 * (IEM_ACCESS_DATA_R) and the LOCK prefix is rejected via the
 * IEMOP_BODY_BIT_Ev_Gv_NO_LOCK tail macro. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();        /* not available on 8086/286 */
    IEMOP_BODY_BIT_Ev_Gv(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BIT_Ev_Gv_NO_LOCK();
}
9250
9251
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Decodes and emulates the double-precision shift with an immediate byte
 * shift count.  AF and OF are declared undefined for verification purposes.
 * For the memory form, the effective address is calculated with cbImm=1 so
 * the trailing Ib byte is accounted for before it is fetched, and the
 * destination is mapped read-write around the worker call.
 *
 * @param   pImpl   Worker function table (shld or shrd variants,
 *                  selected per EFLAGS behavior by the caller).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: immediate follows ModR/M directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: cbImm=1 in the address calc reserves the Ib byte. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9396
9397
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Same as iemOpCommonShldShrd_Ib except the shift count comes from the CL
 * register (fetched via X86_GREG_xCX) instead of an immediate byte, so the
 * memory form calculates the effective address with cbImm=0.  AF and OF are
 * declared undefined for verification purposes.
 *
 * @param   pImpl   Worker function table (shld or shrd variants,
 *                  selected per EFLAGS behavior by the caller).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9541
9542
9543
/** Opcode 0x0f 0xa4 - shld Ev,Gv,Ib.
 * Dispatches to the common Ib worker with the shld EFLAGS-behavior table. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();        /* not available on 8086/286 */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
9551
9552
/** Opcode 0x0f 0xa5 - shld Ev,Gv,CL.
 * Dispatches to the common CL worker with the shld EFLAGS-behavior table. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();        /* not available on 8086/286 */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
9560
9561
/** Opcode 0x0f 0xa8 - push gs.
 * Uses the common segment-register push worker. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();        /* GS does not exist before the 386 */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
9570
9571
/** Opcode 0x0f 0xa9 - pop gs.
 * Defers to the C implementation of segment-register pop, passing the
 * effective operand size. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();        /* GS does not exist before the 386 */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
9580
9581
/** Opcode 0x0f 0xaa - rsm (resume from system management mode).
 * Fully deferred to the C implementation. */
FNIEMOP_DEF(iemOp_rsm)
{
    IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
    IEMOP_HLP_MIN_386(); /* 386SL and later. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
}
9590
9591
9592
/** Opcode 0x0f 0xab - bts Ev,Gv.
 * BTS modifies the destination, so it is mapped read-write
 * (IEM_ACCESS_DATA_RW) and the LOCK prefix is supported via the locked
 * worker functions in the IEMOP_BODY_BIT_Ev_Gv_LOCKED tail macro. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();        /* not available on 8086/286 */
    IEMOP_BODY_BIT_Ev_Gv( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
}
9601
9602
/** Opcode 0x0f 0xac - shrd Ev,Gv,Ib.
 * Dispatches to the common Ib worker with the shrd EFLAGS-behavior table. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();        /* not available on 8086/286 */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
9610
9611
/** Opcode 0x0f 0xad - shrd Ev,Gv,CL.
 * Dispatches to the common CL worker with the shrd EFLAGS-behavior table. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();        /* not available on 8086/286 */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
9619
9620
/** Opcode 0x0f 0xae mem/0 - fxsave m512.
 * Saves the x87/MMX/SSE state to memory; #UD unless the guest CPU reports
 * FXSR support.  The state is only read here, hence FOR_READ. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();  /* saving only reads the state */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9639
9640
/** Opcode 0x0f 0xae mem/1 - fxrstor m512.
 * Restores the x87/MMX/SSE state from memory; #UD unless the guest CPU
 * reports FXSR support.  The state is modified, hence FOR_CHANGE. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();  /* restoring modifies the state */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9659
9660
9661/**
9662 * @opmaps grp15
9663 * @opcode !11/2
9664 * @oppfx none
9665 * @opcpuid sse
9666 * @opgroup og_sse_mxcsrsm
9667 * @opxcpttype 5
9668 * @optest op1=0 -> mxcsr=0
9669 * @optest op1=0x2083 -> mxcsr=0x2083
9670 * @optest op1=0xfffffffe -> value.xcpt=0xd
9671 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9672 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9673 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9674 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9675 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9676 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9677 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9678 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9679 */
9680FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9681{
9682 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9683 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9684 return IEMOP_RAISE_INVALID_OPCODE();
9685
9686 IEM_MC_BEGIN(2, 0);
9687 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9688 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9691 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9692 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9693 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9694 IEM_MC_END();
9695}
9696
9697
/**
 * @opmaps grp15
 * @opcode !11/3
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest mxcsr=0 -> op1=0
 * @optest mxcsr=0x2083 -> op1=0x2083
 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();  /* storing MXCSR only reads state */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9732
9733
/**
 * @opmaps grp15
 * @opcode !11/4
 * @oppfx none
 * @opcpuid xsave
 * @opgroup og_system
 * @opxcpttype none
 *
 * Saves processor extended state to memory; \#UD unless the guest CPU
 * reports XSAVE support.  The state is only read here, hence FOR_READ.
 */
FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9759
9760
9761/**
9762 * @opmaps grp15
9763 * @opcode !11/5
9764 * @oppfx none
9765 * @opcpuid xsave
9766 * @opgroup og_system
9767 * @opxcpttype none
9768 */
9769FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9770{
9771 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9772 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9773 return IEMOP_RAISE_INVALID_OPCODE();
9774
9775 IEM_MC_BEGIN(3, 0);
9776 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9777 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9778 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9781 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9782 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9783 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9784 IEM_MC_END();
9785}
9786
/** Opcode 0x0f 0xae mem/6 - xsaveopt. Not implemented yet; decodes to a stub. */
FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9789
/**
 * @opmaps grp15
 * @opcode !11/7
 * @oppfx none
 * @opcpuid clfsh
 * @opgroup og_cachectl
 * @optest op1=1 ->
 *
 * Without CLFSH support this falls back to the invalid-with-ModRM handler
 * (operand bytes still need consuming).  Both CLFLUSH and CLFLUSHOPT share
 * the same C implementation.
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9813
/**
 * @opmaps grp15
 * @opcode !11/7
 * @oppfx 0x66
 * @opcpuid clflushopt
 * @opgroup og_cachectl
 * @optest op1=1 ->
 *
 * 0x66-prefixed variant of CLFLUSH; shares the same C implementation, the
 * only difference visible here is the CPUID feature gate.
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9837
9838
/** Opcode 0x0f 0xae 11b/5 - lfence.
 * \#UD unless the guest CPU reports SSE2.  On non-ARM64 hosts without SSE2
 * an alternative memory-fence helper is used instead of the native one. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9860
9861
/** Opcode 0x0f 0xae 11b/6 - mfence.
 * \#UD unless the guest CPU reports SSE2.  On non-ARM64 hosts without SSE2
 * an alternative memory-fence helper is used instead of the native one. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9883
9884
/** Opcode 0x0f 0xae 11b/7 - sfence.
 * \#UD unless the guest CPU reports SSE2.  On non-ARM64 hosts without SSE2
 * an alternative memory-fence helper is used instead of the native one. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9906
9907
/** Opcode 0xf3 0x0f 0xae 11b/0 - rdfsbase Ry.
 * Reads the FS base into the r/m register; 64-bit or 32-bit destination
 * depending on the effective operand size (REX.W). */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);  /* requires FSGSBASE CPUID bit */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9934
9935
/** Opcode 0xf3 0x0f 0xae 11b/1 - rdgsbase Ry.
 * Reads the GS base into the r/m register; 64-bit or 32-bit destination
 * depending on the effective operand size (REX.W). */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);  /* requires FSGSBASE CPUID bit */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9962
9963
/** Opcode 0xf3 0x0f 0xae 11b/2 - wrfsbase Ry.
 * Writes the FS base from the r/m register.  The 64-bit form checks the
 * value is canonical (\#GP(0) otherwise); the 32-bit form stores the
 * zero-extended 32-bit value, which is always canonical. */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);  /* requires FSGSBASE CPUID bit */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9991
9992
/** Opcode 0xf3 0x0f 0xae 11b/3 - wrgsbase Ry.
 * Writes the GS base from the r/m register.  The 64-bit form checks the
 * value is canonical (\#GP(0) otherwise); the 32-bit form stores the
 * zero-extended 32-bit value, which is always canonical. */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);  /* requires FSGSBASE CPUID bit */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10020
10021
/**
 * Group 15 jump table for register variant.
 *
 * Indexed by ModR/M /reg (rows 0..7) times four plus the prefix index
 * (columns: none, 0x66, 0xf3, 0xf2 - see pVCpu->iem.s.idxPrefix).
 * Only the 0xf3-prefixed rd/wr fs/gs base forms and the unprefixed
 * fence instructions are valid here.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdfsbase,           iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdgsbase,           iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrfsbase,           iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrgsbase,           iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10037
10038
/**
 * Group 15 jump table for memory variant.
 *
 * Same row/column layout as g_apfnGroup15RegReg (/reg * 4 + prefix index).
 * All valid memory forms here take no mandatory prefix except clflushopt
 * (0x66 prefixed clflush encoding).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ iemOp_Grp15_fxsave,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /3 */ iemOp_Grp15_stmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave,             iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt,          iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush,           iemOp_Grp15_clflushopt,         iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10054
10055
10056/** Opcode 0x0f 0xae. */
10057FNIEMOP_DEF(iemOp_Grp15)
10058{
10059 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10061 if (IEM_IS_MODRM_REG_MODE(bRm))
10062 /* register, register */
10063 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10064 + pVCpu->iem.s.idxPrefix], bRm);
10065 /* memory, register */
10066 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10067 + pVCpu->iem.s.idxPrefix], bRm);
10068}
10069
10070
/** Opcode 0x0f 0xaf. */
/**
 * IMUL Gv,Ev - two operand signed multiply.
 *
 * SF/ZF/AF/PF are undefined per the verification hint below; the actual
 * flag behavior comes from the CPU-behavior-selected worker table.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    /* Select the eflags-behavior variant matching the target CPU, then reuse
       the generic reg,rm binary-op body (last arg 1 = source may be memory). */
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
}
10080
10081
/** Opcode 0x0f 0xb0. */
/**
 * CMPXCHG Eb,Gb - compare AL with the destination; on equal, store the source
 * byte into the destination, otherwise load the destination into AL.
 *
 * The comparison/exchange itself is done by the iemAImpl_cmpxchg_u8 worker
 * (locked variant when a LOCK prefix is present), which also updates EFLAGS.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: AL and the destination are referenced
           directly, so the worker writes them in place. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: the destination is mapped R/W, AL is copied
           into a local so it can be passed by reference and committed back
           unconditionally afterwards. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* AL is stored back unconditionally; presumably the worker leaves it
           unchanged when the exchange succeeded - TODO confirm. */
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10139
/** Opcode 0x0f 0xb1. */
/**
 * CMPXCHG Ev,Gv - compare rAX with the destination; on equal, store the
 * source into the destination, otherwise load the destination into rAX.
 *
 * Handled per effective operand size (16/32/64-bit) and per destination
 * (register vs memory).  On 32-bit hosts (RT_ARCH_X86) the 64-bit source is
 * passed by reference to the assembly worker instead of by value.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit writes in long mode clear the high dword of the full
                   register: on success (ZF set) the destination was written,
                   otherwise EAX was - clear the high half of whichever it was. */
                IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                } IEM_MC_ELSE() {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                } IEM_MC_ENDIF();

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                /* 32-bit host: 64-bit source passed by reference. */
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map it R/W, copy rAX into a local for the
           by-reference accumulator argument, then commit everything. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); /* unconditional write-back of AX */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);

                /* EAX is only written back on mismatch (ZF clear); a 32-bit
                   store would clear the high RAX dword on success otherwise. */
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                    IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                } IEM_MC_ENDIF();

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                /* 32-bit host: 64-bit source passed by reference. */
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); /* unconditional write-back of RAX */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10341
10342
/**
 * Common worker for LDS/LSS/LES/LFS/LGS style far pointer loads:
 * loads the offset part into the ModR/M register operand and the 16-bit
 * selector (at offset 2/4/8 after it) into the given segment register via
 * the iemCImpl_load_SReg_Greg C implementation.
 *
 * @param   iSegReg     The destination segment register (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must encode a memory operand.
 *
 * NOTE(review): no break statements after IEM_MC_END() - presumably
 * IEM_MC_CALL_CIMPL_5/IEM_MC_END expand to a return, so the cases cannot
 * fall through; confirm against the IEM_MC macro definitions.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
    uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2); /* selector follows the 16-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4); /* selector follows the 32-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8); /* selector follows the 64-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10401
10402
10403/** Opcode 0x0f 0xb2. */
10404FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10405{
10406 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10407 IEMOP_HLP_MIN_386();
10408 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10409 if (IEM_IS_MODRM_REG_MODE(bRm))
10410 return IEMOP_RAISE_INVALID_OPCODE();
10411 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10412}
10413
10414
/** Opcode 0x0f 0xb3. */
/**
 * BTR Ev,Gv - bit test and reset.
 *
 * The first body macro handles registers plus unlocked memory forms
 * (R/W access); the second appends the LOCK-prefixed memory forms.
 */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
}
10423
10424
10425/** Opcode 0x0f 0xb4. */
10426FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10427{
10428 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10429 IEMOP_HLP_MIN_386();
10430 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10431 if (IEM_IS_MODRM_REG_MODE(bRm))
10432 return IEMOP_RAISE_INVALID_OPCODE();
10433 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10434}
10435
10436
10437/** Opcode 0x0f 0xb5. */
10438FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10439{
10440 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10441 IEMOP_HLP_MIN_386();
10442 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10443 if (IEM_IS_MODRM_REG_MODE(bRm))
10444 return IEMOP_RAISE_INVALID_OPCODE();
10445 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10446}
10447
10448
/** Opcode 0x0f 0xb6. */
/**
 * MOVZX Gv,Eb - zero-extend a byte (register or memory) into a 16/32/64-bit
 * general purpose register.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10542
10543
/** Opcode 0x0f 0xb7. */
/**
 * MOVZX Gv,Ew - zero-extend a word (register or memory) into a 32/64-bit
 * general purpose register.  A 16-bit effective operand size is folded into
 * the 32-bit path (see the todo below).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
10611
10612
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
/* Decodes to \#UD via the stub macro; the instruction is not implemented. */
FNIEMOP_UD_STUB(iemOp_jmpe);
10615
10616
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
/**
 * POPCNT Gv,Ev - population count.
 *
 * Raises \#UD when the guest CPU profile lacks the POPCNT feature.  On x86
 * hosts a native worker table is available and selected over the C fallback
 * by IEM_SELECT_HOST_OR_FALLBACK; other hosts always use the fallback.
 */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    static const IEMOPBINSIZES s_Native =
    {   NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
# endif
    static const IEMOPBINSIZES s_Fallback =
    {   NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
#endif
    const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
}
10634
10635
/**
 * @opcode      0xb9
 * @opinvalid   intel-modrm
 * @optest      ->
 *
 * UD1 - guaranteed invalid opcode, always raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    /*
     * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
     * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
     */
    Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
    IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
    return FNIEMOP_CALL(iemOp_InvalidNeedRM);
}
10651
10652
/**
 * Body for group 8 bit instruction.
 *
 * Handles the register destination and the unlocked memory destination of
 * BT/BTS/BTR/BTC Ev,Ib.  The immediate bit index is masked to the operand
 * width (0x0f/0x1f/0x3f).  Deliberately ends inside an open brace pair for
 * the LOCK-prefixed path: the caller must follow up with either
 * IEMOP_BODY_BIT_Ev_Ib_NO_LOCK() or IEMOP_BODY_BIT_Ev_Ib_LOCKED(), which
 * close the braces.
 */
#define IEMOP_BODY_BIT_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ bImm & 0x0f,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ bImm & 0x1f,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ bImm & 0x3f,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 1); \
                    IEM_MC_ARG(uint16_t *,              pu16Dst,                0); \
                    IEM_MC_ARG(uint16_t,                u16Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 1); \
                    IEM_MC_ARG(uint32_t *,              pu32Dst,                0); \
                    IEM_MC_ARG(uint32_t,                u32Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 1); \
                    IEM_MC_ARG(uint64_t *,              pu64Dst,                0); \
                    IEM_MC_ARG(uint64_t,                u64Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
10790
/**
 * Closes IEMOP_BODY_BIT_Ev_Ib for instructions that do not permit a LOCK
 * prefix (BT): raises \#UD on a locked memory form and closes the braces
 * left open by the body macro.
 */
#define IEMOP_BODY_BIT_Ev_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } \
    (void)0
10797
/**
 * Closes IEMOP_BODY_BIT_Ev_Ib for instructions that permit a LOCK prefix
 * (BTS/BTR/BTC): implements the locked memory forms using the given locked
 * workers and closes the braces left open by the body macro.
 */
#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 1); \
                    IEM_MC_ARG(uint16_t *,              pu16Dst,                0); \
                    IEM_MC_ARG(uint16_t,                u16Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 1); \
                    IEM_MC_ARG(uint32_t *,              pu32Dst,                0); \
                    IEM_MC_ARG(uint32_t,                u32Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 1); \
                    IEM_MC_ARG(uint64_t *,              pu64Dst,                0); \
                    IEM_MC_ARG(uint64_t,                u64Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
10869
10870
/** Opcode 0x0f 0xba /4 - BT Ev,Ib (bit test, immediate bit index). */
FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
{
    IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
    /* BT only reads the operand (CF gets the tested bit), hence read-only
       access and no LOCK-prefixed variant. */
    IEMOP_BODY_BIT_Ev_Ib(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BIT_Ev_Ib_NO_LOCK();
}
10878
10879
/** Opcode 0x0f 0xba /5 - BTS Ev,Ib (bit test and set, immediate bit index). */
FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
{
    IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
    /* BTS modifies the operand, so read-write access plus locked worker variants. */
    IEMOP_BODY_BIT_Ev_Ib( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
}
10887
10888
/** Opcode 0x0f 0xba /6 - BTR Ev,Ib (bit test and reset, immediate bit index). */
FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
{
    IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
    /* BTR modifies the operand, so read-write access plus locked worker variants. */
    IEMOP_BODY_BIT_Ev_Ib( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
}
10896
10897
/** Opcode 0x0f 0xba /7 - BTC Ev,Ib (bit test and complement, immediate bit index). */
FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
{
    IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
    /* BTC modifies the operand, so read-write access plus locked worker variants. */
    IEMOP_BODY_BIT_Ev_Ib( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
}
10905
10906
/**
 * Opcode 0x0f 0xba - Group 8 (BT/BTS/BTR/BTC Ev,Ib).
 *
 * Dispatches on the ModR/M reg field; /0../3 are undefined encodings.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);

        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10926
10927
10928/** Opcode 0x0f 0xbb. */
10929FNIEMOP_DEF(iemOp_btc_Ev_Gv)
10930{
10931 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
10932 IEMOP_HLP_MIN_386();
10933 IEMOP_BODY_BIT_Ev_Gv( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64, IEM_ACCESS_DATA_RW);
10934 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
10935}
10936
10937
10938/**
10939 * Common worker for BSF and BSR instructions.
10940 *
10941 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
10942 * the destination register, which means that for 32-bit operations the high
10943 * bits must be left alone.
10944 *
10945 * @param pImpl Pointer to the instruction implementation (assembly).
10946 */
10947FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
10948{
10949 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10950
10951 /*
10952 * If rm is denoting a register, no more instruction bytes.
10953 */
10954 if (IEM_IS_MODRM_REG_MODE(bRm))
10955 {
10956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10957 switch (pVCpu->iem.s.enmEffOpSize)
10958 {
10959 case IEMMODE_16BIT:
10960 IEM_MC_BEGIN(3, 0);
10961 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10962 IEM_MC_ARG(uint16_t, u16Src, 1);
10963 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10964
10965 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10966 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10967 IEM_MC_REF_EFLAGS(pEFlags);
10968 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10969
10970 IEM_MC_ADVANCE_RIP_AND_FINISH();
10971 IEM_MC_END();
10972 break;
10973
10974 case IEMMODE_32BIT:
10975 IEM_MC_BEGIN(3, 0);
10976 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10977 IEM_MC_ARG(uint32_t, u32Src, 1);
10978 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10979
10980 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10981 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10982 IEM_MC_REF_EFLAGS(pEFlags);
10983 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10984 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10985 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10986 } IEM_MC_ENDIF();
10987 IEM_MC_ADVANCE_RIP_AND_FINISH();
10988 IEM_MC_END();
10989 break;
10990
10991 case IEMMODE_64BIT:
10992 IEM_MC_BEGIN(3, 0);
10993 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10994 IEM_MC_ARG(uint64_t, u64Src, 1);
10995 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10996
10997 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10998 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
10999 IEM_MC_REF_EFLAGS(pEFlags);
11000 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11001
11002 IEM_MC_ADVANCE_RIP_AND_FINISH();
11003 IEM_MC_END();
11004 break;
11005
11006 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11007 }
11008 }
11009 else
11010 {
11011 /*
11012 * We're accessing memory.
11013 */
11014 switch (pVCpu->iem.s.enmEffOpSize)
11015 {
11016 case IEMMODE_16BIT:
11017 IEM_MC_BEGIN(3, 1);
11018 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11019 IEM_MC_ARG(uint16_t, u16Src, 1);
11020 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11022
11023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11025 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11026 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11027 IEM_MC_REF_EFLAGS(pEFlags);
11028 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11029
11030 IEM_MC_ADVANCE_RIP_AND_FINISH();
11031 IEM_MC_END();
11032 break;
11033
11034 case IEMMODE_32BIT:
11035 IEM_MC_BEGIN(3, 1);
11036 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11037 IEM_MC_ARG(uint32_t, u32Src, 1);
11038 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11039 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11040
11041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11043 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11044 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11045 IEM_MC_REF_EFLAGS(pEFlags);
11046 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11047
11048 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11049 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11050 } IEM_MC_ENDIF();
11051 IEM_MC_ADVANCE_RIP_AND_FINISH();
11052 IEM_MC_END();
11053 break;
11054
11055 case IEMMODE_64BIT:
11056 IEM_MC_BEGIN(3, 1);
11057 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11058 IEM_MC_ARG(uint64_t, u64Src, 1);
11059 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11061
11062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11064 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11065 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11066 IEM_MC_REF_EFLAGS(pEFlags);
11067 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11068
11069 IEM_MC_ADVANCE_RIP_AND_FINISH();
11070 IEM_MC_END();
11071 break;
11072
11073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11074 }
11075 }
11076}
11077
11078
/** Opcode 0x0f 0xbc - BSF Gv,Ev (bit scan forward). */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* Everything but ZF is undefined after BSF; ZF handling lives in the worker. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
}
11087
11088
/**
 * Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev (trailing zero count).
 *
 * Without BMI1 the F3 prefix is ignored and this decodes as BSF.
 */
FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
    IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Generic, AMD-flavoured and Intel-flavoured workers; the 2x4 table picks
       one based on whether the host has BMI1 and the target CPU EFLAGS behavior. */
    static const IEMOPBINSIZES s_iemAImpl_tzcnt =
    {   NULL, NULL,     iemAImpl_tzcnt_u16, NULL,       iemAImpl_tzcnt_u32, NULL,       iemAImpl_tzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
    {   NULL, NULL,     iemAImpl_tzcnt_u16_amd, NULL,   iemAImpl_tzcnt_u32_amd, NULL,   iemAImpl_tzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
    {   NULL, NULL,     iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
    static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
        { &s_iemAImpl_tzcnt,       &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
                                                                            IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
}
11114
11115
/** Opcode 0x0f 0xbd - BSR Gv,Ev (bit scan reverse). */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    /* Everything but ZF is undefined after BSR; ZF handling lives in the worker. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
}
11124
11125
/**
 * Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev (leading zero count).
 *
 * Without BMI1 the F3 prefix is ignored and this decodes as BSR.
 */
FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
    IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Generic, AMD-flavoured and Intel-flavoured workers; the 2x4 table picks
       one based on whether the host has BMI1 and the target CPU EFLAGS behavior. */
    static const IEMOPBINSIZES s_iemAImpl_lzcnt =
    {   NULL, NULL,     iemAImpl_lzcnt_u16, NULL,       iemAImpl_lzcnt_u32, NULL,       iemAImpl_lzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
    {   NULL, NULL,     iemAImpl_lzcnt_u16_amd, NULL,   iemAImpl_lzcnt_u32_amd, NULL,   iemAImpl_lzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
    {   NULL, NULL,     iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
    static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
        { &s_iemAImpl_lzcnt,       &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
                                                                            IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
}
11151
11152
11153
/** Opcode 0x0f 0xbe - MOVSX Gv,Eb (sign-extend byte into word/dword/qword). */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11247
11248
/** Opcode 0x0f 0xbf - MOVSX Gv,Ew (sign-extend word into dword/qword). */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     * Only a 64-bit destination differs from the 32-bit case; the 16-bit
     * operand size is treated like 32-bit (see todo above).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
11316
11317
/** Opcode 0x0f 0xc0 - XADD Eb,Gb (exchange and add, byte). */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The source register is worked on via a
         * local copy so its exchanged value can be committed after the
         * memory mapping is written back and unmapped.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst,          0);
        IEM_MC_ARG(uint8_t *, pu8Reg,          1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* The LOCK prefix selects the atomic worker variant. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11374
11375
/** Opcode 0x0f 0xc1 - XADD Ev,Gv (exchange and add, word/dword/qword). */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* Both registers are written, so both get zero-extended to 64 bits. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  As in the byte variant, the register
         * operand is worked on via a local copy that is committed to the
         * register only after the memory operand has been written back.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t *, pu16Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                /* The LOCK prefix selects the atomic worker variant. */
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t *, pu32Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t *, pu64Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11528
11529
/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib (packed single-precision compare, imm8 predicate). */
FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(4, 2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC,              Src);
        IEM_MC_LOCAL(X86XMMREG,                     Dst);
        IEM_MC_ARG(uint32_t *,                      pfMxcsr,            0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG,            pDst,       Dst,    1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC,    pSrc,       Src,    2);
        IEM_MC_ARG_CONST(uint8_t,                   bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
        /* The destination is only written when no unmasked FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].  Note that the imm8 follows the ModR/M bytes, so the
         * effective address is calculated before fetching it.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC,              Src);
        IEM_MC_LOCAL(X86XMMREG,                     Dst);
        IEM_MC_ARG(uint32_t *,                      pfMxcsr,            0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG,            pDst,       Dst,    1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC,    pSrc,       Src,    2);
        IEM_MC_LOCAL(RTGCPTR,                       GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t,                   bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11599
11600
/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib (packed double-precision compare, imm8 predicate; SSE2). */
FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(4, 2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC,              Src);
        IEM_MC_LOCAL(X86XMMREG,                     Dst);
        IEM_MC_ARG(uint32_t *,                      pfMxcsr,            0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG,            pDst,       Dst,    1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC,    pSrc,       Src,    2);
        IEM_MC_ARG_CONST(uint8_t,                   bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        /* The destination is only written when no unmasked FP exception is pending. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].  Note that the imm8 follows the ModR/M bytes, so the
         * effective address is calculated before fetching it.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC,              Src);
        IEM_MC_LOCAL(X86XMMREG,                     Dst);
        IEM_MC_ARG(uint32_t *,                      pfMxcsr,            0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG,            pDst,       Dst,    1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC,    pSrc,       Src,    2);
        IEM_MC_LOCAL(RTGCPTR,                       GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t,                   bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11670
11671
11672/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11673FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11674{
11675 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11676
11677 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11678 if (IEM_IS_MODRM_REG_MODE(bRm))
11679 {
11680 /*
11681 * XMM32, XMM32.
11682 */
11683 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11685 IEM_MC_BEGIN(4, 2);
11686 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11687 IEM_MC_LOCAL(X86XMMREG, Dst);
11688 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11689 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11690 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11691 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11692 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11693 IEM_MC_PREPARE_SSE_USAGE();
11694 IEM_MC_REF_MXCSR(pfMxcsr);
11695 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11696 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11697 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11698 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11699 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11700 } IEM_MC_ELSE() {
11701 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11702 } IEM_MC_ENDIF();
11703
11704 IEM_MC_ADVANCE_RIP_AND_FINISH();
11705 IEM_MC_END();
11706 }
11707 else
11708 {
11709 /*
11710 * XMM32, [mem32].
11711 */
11712 IEM_MC_BEGIN(4, 3);
11713 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11714 IEM_MC_LOCAL(X86XMMREG, Dst);
11715 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11716 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11717 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11718 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11719
11720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11721 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11722 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11724 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11725 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11726
11727 IEM_MC_PREPARE_SSE_USAGE();
11728 IEM_MC_REF_MXCSR(pfMxcsr);
11729 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11730 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11731 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11732 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11733 } IEM_MC_ELSE() {
11734 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11735 } IEM_MC_ENDIF();
11736
11737 IEM_MC_ADVANCE_RIP_AND_FINISH();
11738 IEM_MC_END();
11739 }
11740}
11741
11742
/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib
 *
 * Compares the low double-precision value of the destination XMM register
 * against the source (XMM register or 64-bit memory operand) using the
 * predicate in the immediate byte, and on success writes the resulting mask
 * into the low quadword of the destination only.  If the worker leaves an
 * unmasked exception pending in MXCSR, the result is not committed and a
 * SIMD-FP exception (or \#UD, see IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT)
 * is raised instead.  Requires SSE2.
 */
FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(4, 2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src); /* bundles both source operands for the worker */
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            /* Scalar form: only the low quadword of the destination is updated. */
            IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM64, [mem64].
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Note: effective address first (1 byte left: the immediate), then the imm8. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11812
11813
/** Opcode 0x0f 0xc3.
 *
 * MOVNTI - store a general-purpose register to memory with a non-temporal
 * hint.  The hint itself is not modelled here; the store is performed as a
 * regular 32-bit or 64-bit memory write.  Requires SSE2.
 */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEMOP_HLP_RAISE_UD_IF_MISSING_GUEST_FEATURE(pVCpu, fSse2);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEMOP_HLP_RAISE_UD_IF_MISSING_GUEST_FEATURE(pVCpu, fSse2);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
11866
11867
11868/* Opcode 0x66 0x0f 0xc3 - invalid */
11869/* Opcode 0xf3 0x0f 0xc3 - invalid */
11870/* Opcode 0xf2 0x0f 0xc3 - invalid */
11871
11872
/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib
 *
 * Inserts a word (from the low 16 bits of a general register or from a
 * 16-bit memory operand) into the MMX destination at the word position
 * selected by the immediate.  Available with SSE or the AMD MMX extensions.
 */
FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *, pu64Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* MMX instructions switch the x87 unit to MMX mode. */
        IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint64_t *, pu64Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first (1 byte left for the immediate), then imm8. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11925
11926
/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib
 *
 * SSE2 variant of PINSRW: inserts a word into the XMM destination at the
 * word position selected by the immediate.  Source is the low 16 bits of a
 * general register or a 16-bit memory operand.
 */
FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first (1 byte left for the immediate), then imm8. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11975
11976
11977/* Opcode 0xf3 0x0f 0xc4 - invalid */
11978/* Opcode 0xf2 0x0f 0xc4 - invalid */
11979
11980
/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib
 *
 * Extracts the word selected by the immediate from an MMX register and
 * zero-extends it into a 32-bit general register (the U32 store clears the
 * upper bits).  Register source only; the memory form is \#UD.  Available
 * with SSE or the AMD MMX extensions.
 */
FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
{
    /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, MMX, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(uint16_t, u16Dst);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
        IEM_MC_ARG(uint64_t, u64Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* MMX instructions switch the x87 unit to MMX mode. */
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
12011
12012
/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib
 *
 * SSE2 variant: extracts the word selected by the immediate from an XMM
 * register and zero-extends it into a 32-bit general register.  Register
 * source only; the memory form is \#UD.
 */
FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
{
    IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(uint16_t, u16Dst);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
12042
12043
12044/* Opcode 0xf3 0x0f 0xc5 - invalid */
12045/* Opcode 0xf2 0x0f 0xc5 - invalid */
12046
12047
/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib
 *
 * Shuffles single-precision elements of the destination and source XMM
 * operands according to the immediate selector.  The memory operand must be
 * 16-byte aligned (IEM_MC_FETCH_MEM_U128_ALIGN_SSE).  Requires SSE.
 */
FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first (1 byte left for the immediate), then imm8. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12098
12099
/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib
 *
 * Shuffles double-precision elements of the destination and source XMM
 * operands according to the immediate selector.  The memory operand must be
 * 16-byte aligned (IEM_MC_FETCH_MEM_U128_ALIGN_SSE).  Requires SSE2.
 */
FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first (1 byte left for the immediate), then imm8. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12150
12151
12152/* Opcode 0xf3 0x0f 0xc6 - invalid */
12153/* Opcode 0xf2 0x0f 0xc6 - invalid */
12154
12155
/** Opcode 0x0f 0xc7 !11/1.
 *
 * CMPXCHG8B - compares EDX:EAX with the 64-bit memory operand; on match the
 * worker stores ECX:EBX into memory and sets ZF, otherwise ZF is cleared.
 * When ZF ends up clear the value left in u64EaxEdx by the worker is written
 * back to EDX:EAX (assumed to be the memory operand's old value - matches
 * the architected mismatch behaviour).  A LOCK prefix selects the locked
 * worker variant.
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    /* Map the destination read/write; committed below after the worker call. */
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12198
12199
/** Opcode REX.W 0x0f 0xc7 !11/1.
 *
 * CMPXCHG16B - 128-bit compare-and-exchange of RDX:RAX against the memory
 * operand, storing RCX:RBX on match.  Requires the CX16 CPUID feature
 * (\#UD otherwise) and a 16-byte aligned operand (\#GP(0) otherwise).
 * Hosts without a native 16-byte CAS use a fallback that is not fully
 * atomic; with SMP guests an EMT rendezvous is used instead (see the
 * comments in the \#else branch below).
 */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); /* architected alignment #GP */
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);

#ifdef RT_ARCH_AMD64 /* some code duplication here because IEMAllInstructionsPython.py cannot parse if/else/#if spaghetti. */
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
        { /* (see comments in #else case below) */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }

#elif defined(RT_ARCH_ARM64)
        /** @todo may require fallback for unaligned accesses... */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);

#else
        /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
           accesses and not all all atomic, which works fine on in UNI CPU guest
           configuration (ignoring DMA). If guest SMP is active we have no choice
           but to use a rendezvous callback here. Sigh. */
        if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        else
        {
            IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
        }
#endif

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();

        IEM_MC_END();
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
12280
12281FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12282{
12283 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12284 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12285 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12286}
12287
12288
/** Opcode 0x0f 0xc7 11/6.
 *
 * RDRAND - writes a hardware random number to the destination general
 * register and reports success via CF (handled by the worker through
 * pEFlags).  Uses the host RDRAND instruction when available, otherwise a
 * software fallback.  Register destination only; \#UD for the memory form
 * and when the guest lacks the RDRAND feature.  The 32-bit form clears the
 * high half of the 64-bit register.
 */
FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
        return IEMOP_RAISE_INVALID_OPCODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback),
                                         pu16Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback),
                                         pu32Dst, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback),
                                         pu64Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    /* Register only. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
12351
/** Opcode 0x0f 0xc7 !11/6.
 *
 * VMPTRLD - loads the current-VMCS pointer from the 64-bit memory operand.
 * Decoding only; the real work is done by iemCImpl_vmptrld.  Without nested
 * VMX support compiled in, this decodes to \#UD.
 */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld); /* must be in VMX operation */
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* 066h/0f3h prefixes select vmclear/vmxon instead, hence the stricter check. */
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
#endif
12371
/** Opcode 0x66 0x0f 0xc7 !11/6.
 *
 * VMCLEAR - clears the VMCS referenced by the 64-bit memory operand.
 * Decoding only; the real work is done by iemCImpl_vmclear.  Without nested
 * VMX support compiled in, this decodes to \#UD.
 */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear); /* must be in VMX operation */
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
#endif
12391
/** Opcode 0xf3 0x0f 0xc7 !11/6.
 *
 * VMXON - enters VMX operation using the VMXON region referenced by the
 * 64-bit memory operand.  Decoding only; the real work is done by
 * iemCImpl_vmxon.  Without nested VMX support compiled in, this is \#UD.
 */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmxon, "vmxon");
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon); /* no in-VMX-operation check: vmxon is what enters it */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
#endif
12410
/** Opcode [0xf3] 0x0f 0xc7 !11/7.
 *
 * VMPTRST - stores the current-VMCS pointer to the 64-bit memory operand.
 * Decoding only; the real work is done by iemCImpl_vmptrst.  Without nested
 * VMX support compiled in, this decodes to \#UD.
 */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst); /* must be in VMX operation */
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
#endif
12430
/** Opcode 0x0f 0xc7 11/7.
 *
 * RDSEED - writes a hardware entropy value to the destination general
 * register, success reported via CF (handled by the worker through
 * pEFlags).  Mirrors the RDRAND handler above: host instruction when
 * available, software fallback otherwise; register destination only; \#UD
 * for the memory form and when the guest lacks the RDSEED feature.  The
 * 32-bit form clears the high half of the 64-bit register.
 */
FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
        return IEMOP_RAISE_INVALID_OPCODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback),
                                         pu16Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback),
                                         pu32Dst, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint32_t *, pEFlags, 1);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback),
                                         pu64Dst, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    /* Register only. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
12493
12494/**
12495 * Group 9 jump table for register variant.
12496 */
12497IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12498{ /* pfx: none, 066h, 0f3h, 0f2h */
12499 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12500 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12501 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12502 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12503 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12504 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12505 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12506 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12507};
12508AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12509
12510
12511/**
12512 * Group 9 jump table for memory variant.
12513 */
12514IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12515{ /* pfx: none, 066h, 0f3h, 0f2h */
12516 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12517 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12518 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12519 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12520 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12521 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12522 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12523 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12524};
12525AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12526
12527
12528/** Opcode 0x0f 0xc7. */
12529FNIEMOP_DEF(iemOp_Grp9)
12530{
12531 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
12532 if (IEM_IS_MODRM_REG_MODE(bRm))
12533 /* register, register */
12534 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12535 + pVCpu->iem.s.idxPrefix], bRm);
12536 /* memory, register */
12537 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12538 + pVCpu->iem.s.idxPrefix], bRm);
12539}
12540
12541
12542/**
12543 * Common 'bswap register' helper.
12544 */
12545FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12546{
12547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12548 switch (pVCpu->iem.s.enmEffOpSize)
12549 {
12550 case IEMMODE_16BIT:
12551 IEM_MC_BEGIN(1, 0);
12552 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12553 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12554 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12555 IEM_MC_ADVANCE_RIP_AND_FINISH();
12556 IEM_MC_END();
12557 break;
12558
12559 case IEMMODE_32BIT:
12560 IEM_MC_BEGIN(1, 0);
12561 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12562 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12563 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12564 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12565 IEM_MC_ADVANCE_RIP_AND_FINISH();
12566 IEM_MC_END();
12567 break;
12568
12569 case IEMMODE_64BIT:
12570 IEM_MC_BEGIN(1, 0);
12571 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12572 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12573 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12574 IEM_MC_ADVANCE_RIP_AND_FINISH();
12575 IEM_MC_END();
12576 break;
12577
12578 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12579 }
12580}
12581
12582
/** Opcode 0x0f 0xc8. BSWAP rAX/r8; the register index is extended via REX.B. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix. REX.B is the correct prefix it appears. For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486(); /* BSWAP first appeared on the 486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
12593
12594
/** Opcode 0x0f 0xc9. BSWAP rCX/r9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486(); /* BSWAP first appeared on the 486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
12602
12603
12604/** Opcode 0x0f 0xca. */
12605FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12606{
12607 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r10");
12608 IEMOP_HLP_MIN_486();
12609 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12610}
12611
12612
12613/** Opcode 0x0f 0xcb. */
12614FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12615{
12616 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r11");
12617 IEMOP_HLP_MIN_486();
12618 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12619}
12620
12621
/** Opcode 0x0f 0xcc. BSWAP rSP/r12. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486(); /* BSWAP first appeared on the 486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
12629
12630
/** Opcode 0x0f 0xcd. BSWAP rBP/r13. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486(); /* BSWAP first appeared on the 486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
12638
12639
/** Opcode 0x0f 0xce. BSWAP rSI/r14. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486(); /* BSWAP first appeared on the 486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
12647
12648
/** Opcode 0x0f 0xcf. BSWAP rDI/r15. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486(); /* BSWAP first appeared on the 486. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
12656
12657
12658/* Opcode 0x0f 0xd0 - invalid */
12659
12660
/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
{
    /* SSE3 add/subtract of packed doubles; common FP worker does decode + dispatch. */
    IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
}
12667
12668
12669/* Opcode 0xf3 0x0f 0xd0 - invalid */
12670
12671
/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
{
    /* SSE3 add/subtract of packed singles; common FP worker does decode + dispatch. */
    IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
}
12678
12679
12680
/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
{
    /* Packed shift right logical of words, MMX form. */
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
}
12687
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
{
    /* Packed shift right logical of words, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
}
12694
12695/* Opcode 0xf3 0x0f 0xd1 - invalid */
12696/* Opcode 0xf2 0x0f 0xd1 - invalid */
12697
/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
{
    /* Packed shift right logical of dwords, MMX form. */
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
}
12704
12705
/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
{
    /* Packed shift right logical of dwords, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
}
12712
12713
12714/* Opcode 0xf3 0x0f 0xd2 - invalid */
12715/* Opcode 0xf2 0x0f 0xd2 - invalid */
12716
12717/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12718FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12719{
12720 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12721 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12722}
12723
12724
/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
{
    /* Packed shift right logical of quadwords, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
}
12731
12732
12733/* Opcode 0xf3 0x0f 0xd3 - invalid */
12734/* Opcode 0xf2 0x0f 0xd3 - invalid */
12735
12736
/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
{
    /* Packed add of quadwords on MMX registers; routed through the worker
       variant that checks the SSE2 CPUID bit (PADDQ is an SSE2 addition
       to the MMX register file). */
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
}
12743
12744
/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
{
    /* Packed add of quadwords, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
}
12751
12752
12753/* Opcode 0xf3 0x0f 0xd4 - invalid */
12754/* Opcode 0xf2 0x0f 0xd4 - invalid */
12755
/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
{
    /* Packed multiply of words, low halves of the products, MMX form. */
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
}
12762
/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
{
    /* Packed multiply of words, low halves of the products, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
}
12769
12770
12771/* Opcode 0xf3 0x0f 0xd5 - invalid */
12772/* Opcode 0xf2 0x0f 0xd5 - invalid */
12773
12774/* Opcode 0x0f 0xd6 - invalid */
12775
/**
 * @opcode 0xd6
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype none
 * @optest op1=-1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Wq_Vq)
{
    /* MOVQ Wq,Vq: stores the low qword of the source XMM register either into
       another XMM register (zero-extended to 128 bits) or to a 64-bit memory
       operand. */
    IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address must be calculated before the decoding is completed. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12828
12829
/**
 * @opcode 0xd6
 * @opcodesub 11 mr/reg
 * @oppfx f3
 * @opcpuid sse2
 * @opgroup og_sse2_simdint_datamove
 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /* Copies an MMX register into the low qword of an XMM register, zero
           extending the result to 128 bits; also switches the FPU to MMX mode. */
        IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udf30fd6mem
     * @opcode 0xd6
     * @opcodesub !11 mr/reg
     * @oppfx f3
     * @opunused intel-modrm
     * @opcpuid sse
     * @optest ->
     */
    else
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}
12876
12877
/**
 * @opcode 0xd6
 * @opcodesub 11 mr/reg
 * @oppfx f2
 * @opcpuid sse2
 * @opgroup og_sse2_simdint_datamove
 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
 * @optest op1=-42 op2=0xfedcba9876543210
 *     -> op1=0xfedcba9876543210 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /* Copies the low qword of an XMM register into an MMX register and
           switches the FPU to MMX mode (inverse of MOVQ2DQ above). */
        IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udf20fd6mem
     * @opcode 0xd6
     * @opcodesub !11 mr/reg
     * @oppfx f2
     * @opunused intel-modrm
     * @opcpuid sse
     * @optest ->
     */
    else
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}
12928
12929
/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    /* Collects the sign bit of each byte in the MMX source into the low bits
       of the destination general register. Register form only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
12957
12958
/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    /* SSE2 form: collects the sign bit of each byte in the XMM source into
       the destination general register. Register form only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
12983
12984
12985/* Opcode 0xf3 0x0f 0xd7 - invalid */
12986/* Opcode 0xf2 0x0f 0xd7 - invalid */
12987
12988
/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
{
    /* Packed subtract of unsigned bytes with saturation, MMX form. */
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
}
12995
12996
/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
{
    /* Packed subtract of unsigned bytes with saturation, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
}
13003
13004
13005/* Opcode 0xf3 0x0f 0xd8 - invalid */
13006/* Opcode 0xf2 0x0f 0xd8 - invalid */
13007
/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
{
    /* Packed subtract of unsigned words with saturation, MMX form. */
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
}
13014
13015
/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
{
    /* Packed subtract of unsigned words with saturation, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
}
13022
13023
13024/* Opcode 0xf3 0x0f 0xd9 - invalid */
13025/* Opcode 0xf2 0x0f 0xd9 - invalid */
13026
/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
{
    /* Packed minimum of unsigned bytes, MMX form; uses the MmxSse worker
       (SSE or AMD MMX-ext CPUID required for this op on MMX registers). */
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
}
13033
13034
/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
{
    /* Packed minimum of unsigned bytes, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
}
13041
13042/* Opcode 0xf3 0x0f 0xda - invalid */
13043/* Opcode 0xf2 0x0f 0xda - invalid */
13044
/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    /* Bitwise AND of 64-bit MMX operands. */
    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
}
13051
13052
/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
FNIEMOP_DEF(iemOp_pand_Vx_Wx)
{
    /* Bitwise AND of 128-bit XMM operands. */
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}
13059
13060
13061/* Opcode 0xf3 0x0f 0xdb - invalid */
13062/* Opcode 0xf2 0x0f 0xdb - invalid */
13063
/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
{
    /* Packed add of unsigned bytes with saturation, MMX form. */
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
}
13070
13071
/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
{
    /* Packed add of unsigned bytes with saturation, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
}
13078
13079
13080/* Opcode 0xf3 0x0f 0xdc - invalid */
13081/* Opcode 0xf2 0x0f 0xdc - invalid */
13082
/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
{
    /* Packed add of unsigned words with saturation, MMX form. */
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
}
13089
13090
/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
{
    /* Packed add of unsigned words with saturation, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
}
13097
13098
13099/* Opcode 0xf3 0x0f 0xdd - invalid */
13100/* Opcode 0xf2 0x0f 0xdd - invalid */
13101
/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
{
    /* Packed maximum of unsigned bytes, MMX form; uses the MmxSse worker
       (SSE or AMD MMX-ext CPUID required for this op on MMX registers). */
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
}
13108
13109
/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
{
    /* Packed maximum of unsigned bytes, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
}
13116
13117/* Opcode 0xf3 0x0f 0xde - invalid */
13118/* Opcode 0xf2 0x0f 0xde - invalid */
13119
13120
/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
{
    /* Bitwise AND-NOT of 64-bit MMX operands. */
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
}
13127
13128
/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
{
    /* Bitwise AND-NOT of 128-bit XMM operands. */
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}
13135
13136
13137/* Opcode 0xf3 0x0f 0xdf - invalid */
13138/* Opcode 0xf2 0x0f 0xdf - invalid */
13139
/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
{
    /* Packed average of unsigned bytes, MMX form (SSE/AMD MMX-ext checked
       by the MmxSseOpt worker). */
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
}
13146
13147
/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
{
    /* Packed average of unsigned bytes, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
}
13154
13155
13156/* Opcode 0xf3 0x0f 0xe0 - invalid */
13157/* Opcode 0xf2 0x0f 0xe0 - invalid */
13158
/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
{
    /* Packed shift right arithmetic of words, MMX form. */
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
}
13165
13166
/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
{
    /* Packed shift right arithmetic of words, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
}
13173
13174
13175/* Opcode 0xf3 0x0f 0xe1 - invalid */
13176/* Opcode 0xf2 0x0f 0xe1 - invalid */
13177
/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
{
    /* Packed shift right arithmetic of dwords, MMX form. */
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
}
13184
13185
/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
{
    /* Packed shift right arithmetic of dwords, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
}
13192
13193
13194/* Opcode 0xf3 0x0f 0xe2 - invalid */
13195/* Opcode 0xf2 0x0f 0xe2 - invalid */
13196
/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
{
    /* Packed average of unsigned words, MMX form (SSE/AMD MMX-ext checked
       by the MmxSseOpt worker). */
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
}
13203
13204
/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
{
    /* Packed average of unsigned words, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
}
13211
13212
13213/* Opcode 0xf3 0x0f 0xe3 - invalid */
13214/* Opcode 0xf2 0x0f 0xe3 - invalid */
13215
/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
{
    /* Packed multiply of unsigned words, high halves of the products, MMX
       form (SSE/AMD MMX-ext checked by the MmxSseOpt worker). */
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
}
13222
13223
/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
{
    /* Packed multiply of unsigned words, high halves of the products, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
}
13230
13231
13232/* Opcode 0xf3 0x0f 0xe4 - invalid */
13233/* Opcode 0xf2 0x0f 0xe4 - invalid */
13234
/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
{
    /* Packed multiply of signed words, high halves of the products, MMX form. */
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
}
13241
13242
/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
{
    /* Packed multiply of signed words, high halves of the products, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
}
13249
13250
13251/* Opcode 0xf3 0x0f 0xe5 - invalid */
13252/* Opcode 0xf2 0x0f 0xe5 - invalid */
13253/* Opcode 0x0f 0xe6 - invalid */
13254
13255
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
{
    /* Convert packed doubles to dwords with truncation; SSE2 FP worker. */
    IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
}
13262
13263
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
{
    /* Convert packed dwords to doubles; SSE2 FP worker.
       NOTE(review): the operand annotation says Wpd although the source of
       CVTDQ2PD holds integers — matches the VEX mirror, verify intentional. */
    IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
}
13270
13271
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
{
    /* Convert packed doubles to dwords (rounding per MXCSR); SSE2 FP worker. */
    IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
}
13278
13279
/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse1_cachect
 * @opxcpttype  none
 * @optest      op1=-1 op2=2  -> op1=2   ftw=0xff
 * @optest      op1=0  op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    /* MOVNTQ: store an MMX register to memory; IEM implements the
       non-temporal hint as a plain 64-bit store. Memory form only. */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /**
     * @opdone
     * @opmnemonic  ud0fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
13326
/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_cachect
 * @opxcpttype  1
 * @optest      op1=-1 op2=2  -> op1=2
 * @optest      op1=0  op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    /* MOVNTDQ: store an XMM register to 16-byte aligned memory (alignment
       enforced by the ALIGN_SSE store); the non-temporal hint is otherwise
       implemented as a plain store. Memory form only. */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
13373
13374/* Opcode 0xf3 0x0f 0xe7 - invalid */
13375/* Opcode 0xf2 0x0f 0xe7 - invalid */
13376
13377
/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    /* Packed subtract of signed bytes with saturation, MMX form. */
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
}
13384
13385
/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    /* Packed subtract of signed bytes with saturation, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
}
13392
13393
13394/* Opcode 0xf3 0x0f 0xe8 - invalid */
13395/* Opcode 0xf2 0x0f 0xe8 - invalid */
13396
/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    /* Packed subtract of signed words with saturation, MMX form. */
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
}
13403
13404
/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    /* Packed subtract of signed words with saturation, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
}
13411
13412
13413/* Opcode 0xf3 0x0f 0xe9 - invalid */
13414/* Opcode 0xf2 0x0f 0xe9 - invalid */
13415
13416
/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    /* Packed minimum of signed words, MMX form; uses the MmxSse worker
       (SSE or AMD MMX-ext CPUID required for this op on MMX registers). */
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
}
13423
13424
/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    /* Packed minimum of signed words, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
}
13431
13432
13433/* Opcode 0xf3 0x0f 0xea - invalid */
13434/* Opcode 0xf2 0x0f 0xea - invalid */
13435
13436
/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    /* Bitwise OR of 64-bit MMX operands. */
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
}
13443
13444
/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    /* Bitwise OR of 128-bit XMM operands. */
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}
13451
13452
13453/* Opcode 0xf3 0x0f 0xeb - invalid */
13454/* Opcode 0xf2 0x0f 0xeb - invalid */
13455
/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    /* Packed add of signed bytes with saturation, MMX form. */
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
}
13462
13463
/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    /* Packed add of signed bytes with saturation, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
}
13470
13471
13472/* Opcode 0xf3 0x0f 0xec - invalid */
13473/* Opcode 0xf2 0x0f 0xec - invalid */
13474
/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    /* Packed add of signed words with saturation, MMX form. */
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
}
13481
13482
/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    /* Packed add of signed words with saturation, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
}
13489
13490
13491/* Opcode 0xf3 0x0f 0xed - invalid */
13492/* Opcode 0xf2 0x0f 0xed - invalid */
13493
13494
/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    /* Packed maximum of signed words, MMX form; uses the MmxSse worker
       (SSE or AMD MMX-ext CPUID required for this op on MMX registers). */
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}
13501
13502
/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    /* Packed maximum of signed words, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}
13509
13510
13511/* Opcode 0xf3 0x0f 0xee - invalid */
13512/* Opcode 0xf2 0x0f 0xee - invalid */
13513
13514
/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    /* Bitwise XOR of 64-bit MMX operands. */
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}
13521
13522
/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    /* Bitwise XOR of 128-bit XMM operands. */
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}
13529
13530
13531/* Opcode 0xf3 0x0f 0xef - invalid */
13532/* Opcode 0xf2 0x0f 0xef - invalid */
13533
13534/* Opcode 0x0f 0xf0 - invalid */
13535/* Opcode 0x66 0x0f 0xf0 - invalid */
13536
13537
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    /* SSE3 unaligned 128-bit load into an XMM register; no alignment check
       is applied on the fetch. Memory source only. */
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
13570
13571
/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    /* Packed shift left logical of words, MMX form. */
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}
13578
13579
/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    /* Packed shift left logical of words, SSE2/XMM form. */
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}
13586
13587
13588/* Opcode 0xf2 0x0f 0xf1 - invalid */
13589
13590/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
/* Left-shift of packed dwords in an MMX register (64-bit worker). */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13592{
13593 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13594 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13595}
13596
13597
13598/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
/* SSE2 form: left-shift of packed dwords in an XMM register (128-bit worker). */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13600{
13601 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13602 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13603}
13604
13605
13606/* Opcode 0xf2 0x0f 0xf2 - invalid */
13607
13608/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
/* Left-shift of the packed qword in an MMX register (64-bit worker). */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13610{
13611 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13612 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13613}
13614
13615
13616/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
/* SSE2 form: left-shift of packed qwords in an XMM register (128-bit worker). */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13618{
13619 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13620 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13621}
13622
13623/* Opcode 0xf2 0x0f 0xf3 - invalid */
13624
13625/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
/* Unsigned multiply of the low dwords of two MMX operands, producing a
 * 64-bit result.  Note: the MMX form of PMULUDQ was introduced with SSE2
 * (same CPUID gate as the MMX form of PSUBQ below), so it must go through
 * the worker that performs the SSE2 feature check rather than the plain
 * MMX worker which would accept it on pre-SSE2 CPUs. */
FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13627{
13628 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13629 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_pmuludq_u64);
13630}
13631
13632
13633/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
/* SSE2 form: unsigned multiply of the low dwords of each qword lane,
   dispatched through the common SSE2 worker (128-bit implementation). */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13635{
13636 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13637 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13638}
13639
13640
13641/* Opcode 0xf2 0x0f 0xf4 - invalid */
13642
13643/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
/* Multiply-and-add of packed signed words in MMX registers (64-bit worker). */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13645{
13646 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13647 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13648}
13649
13650
13651/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
/* SSE2 form of PMADDWD on XMM registers (128-bit worker). */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13653{
13654 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13655 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13656}
13657
13658/* Opcode 0xf2 0x0f 0xf5 - invalid */
13659
13660/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
/* Sum of absolute byte differences on MMX registers; uses the MmxSse
   worker (this MMX instruction is gated on the SSE/MMXExt feature). */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13662{
13663 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13664 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13665}
13666
13667
13668/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
/* SSE2 form: sum of absolute byte differences on XMM registers. */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13670{
13671 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13672 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13673}
13674
13675
13676/* Opcode 0xf2 0x0f 0xf6 - invalid */
13677
13678/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
/* Not implemented yet: FNIEMOP_STUB emits a placeholder body. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13680/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
/* Not implemented yet: FNIEMOP_STUB emits a placeholder body. */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13682/* Opcode 0xf2 0x0f 0xf7 - invalid */
13683
13684
13685/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
/* Packed byte subtraction on MMX registers (64-bit worker). */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13687{
13688 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13689 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13690}
13691
13692
13693/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
/* SSE2 form: packed byte subtraction on XMM registers (128-bit worker). */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13695{
13696 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13697 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13698}
13699
13700
13701/* Opcode 0xf2 0x0f 0xf8 - invalid */
13702
13703
13704/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
/* Packed word subtraction on MMX registers (64-bit worker). */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13706{
13707 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13708 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13709}
13710
13711
13712/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
/* SSE2 form: packed word subtraction on XMM registers (128-bit worker). */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13714{
13715 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13716 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13717}
13718
13719
13720/* Opcode 0xf2 0x0f 0xf9 - invalid */
13721
13722
13723/** Opcode 0x0f 0xfa - psubd Pq, Qq */
/* Packed dword subtraction on MMX registers (64-bit worker). */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13725{
13726 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13727 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13728}
13729
13730
13731/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
/* SSE2 form: packed dword subtraction on XMM registers (128-bit worker). */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13733{
13734 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13735 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13736}
13737
13738
13739/* Opcode 0xf2 0x0f 0xfa - invalid */
13740
13741
13742/** Opcode 0x0f 0xfb - psubq Pq, Qq */
/* Packed qword subtraction on MMX registers.  The MMX form of PSUBQ was
   introduced with SSE2, hence the _Sse2 worker which also performs the
   SSE2 feature check. */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13744{
13745 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13746 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
13747}
13748
13749
13750/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
/* SSE2 form: packed qword subtraction on XMM registers (128-bit worker). */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13752{
13753 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13754 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13755}
13756
13757
13758/* Opcode 0xf2 0x0f 0xfb - invalid */
13759
13760
13761/** Opcode 0x0f 0xfc - paddb Pq, Qq */
/* Packed byte addition on MMX registers (64-bit worker). */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13763{
13764 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13765 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
13766}
13767
13768
13769/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
/* SSE2 form: packed byte addition on XMM registers (128-bit worker). */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13771{
13772 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13773 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
13774}
13775
13776
13777/* Opcode 0xf2 0x0f 0xfc - invalid */
13778
13779
13780/** Opcode 0x0f 0xfd - paddw Pq, Qq */
/* Packed word addition on MMX registers (64-bit worker). */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
13782{
13783 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13784 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
13785}
13786
13787
13788/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
/* SSE2 form: packed word addition on XMM registers (128-bit worker). */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
13790{
13791 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13792 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
13793}
13794
13795
13796/* Opcode 0xf2 0x0f 0xfd - invalid */
13797
13798
13799/** Opcode 0x0f 0xfe - paddd Pq, Qq */
/* Packed dword addition on MMX registers (64-bit worker). */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
13801{
13802 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13803 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
13804}
13805
13806
13807/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
/* SSE2 form: packed dword addition on XMM registers (128-bit worker). */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
13809{
13810 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13811 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
13812}
13813
13814
13815/* Opcode 0xf2 0x0f 0xfe - invalid */
13816
13817
13818/** Opcode **** 0x0f 0xff - UD0 */
/* UD0 always raises \#UD, but the amount of instruction bytes consumed is
   vendor dependent: on Intel a ModR/M byte (and any addressing bytes it
   implies) is fetched and decoded first; on other vendors no ModR/M byte
   is consumed before raising the exception. */
FNIEMOP_DEF(iemOp_ud0)
13820{
13821 IEMOP_MNEMONIC(ud0, "ud0");
13822 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
13823 {
13824 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
13825#ifndef TST_IEM_CHECK_MC
13826 if (IEM_IS_MODRM_MEM_MODE(bRm))
13827 {
/* Decode the effective address so the right number of displacement/SIB
   bytes are consumed; the resulting address itself is not used. */
13828 RTGCPTR GCPtrEff;
13829 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
13830 if (rcStrict != VINF_SUCCESS)
13831 return rcStrict;
13832 }
13833#endif
13834 IEMOP_HLP_DONE_DECODING();
13835 }
13836 return IEMOP_RAISE_INVALID_OPCODE();
13837}
13838
13839
13840
13841/**
13842 * Two byte opcode map, first byte 0x0f.
13843 *
 * Four entries per second opcode byte, selected by mandatory prefix in the
 * order: no prefix, 0x66, 0xF3, 0xF2 (4 * 256 = 1024 entries, see the
 * AssertCompile below).
13843 *
13844 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
13845 * check if it needs updating as well when making changes.
13846 */
13847IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
13848{
13849 /* no prefix, 066h prefix f3h prefix, f2h prefix */
13850 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
13851 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
13852 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
13853 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
13854 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
13855 /* 0x05 */ IEMOP_X4(iemOp_syscall),
13856 /* 0x06 */ IEMOP_X4(iemOp_clts),
13857 /* 0x07 */ IEMOP_X4(iemOp_sysret),
13858 /* 0x08 */ IEMOP_X4(iemOp_invd),
13859 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
13860 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
13861 /* 0x0b */ IEMOP_X4(iemOp_ud2),
13862 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
13863 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
13864 /* 0x0e */ IEMOP_X4(iemOp_femms),
13865 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
13866
13867 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
13868 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
13869 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
13870 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13871 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13872 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13873 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
13874 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13875 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
13876 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
13877 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
13878 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
13879 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
13880 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
13881 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
13882 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
13883
13884 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
13885 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
13886 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
13887 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
13888 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
13889 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13890 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
13891 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
13892 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13893 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13894 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
13895 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13896 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
13897 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
13898 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13899 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13900
13901 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
13902 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
13903 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
13904 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
13905 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
13906 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
13907 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
13908 /* 0x37 */ IEMOP_X4(iemOp_getsec),
13909 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
13910 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13911 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
13912 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13913 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13914 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
13915 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13916 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
13917
13918 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
13919 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
13920 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
13921 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
13922 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
13923 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
13924 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
13925 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
13926 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
13927 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
13928 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
13929 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
13930 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
13931 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
13932 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
13933 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
13934
13935 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13936 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
13937 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
13938 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
13939 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13940 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13941 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13942 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13943 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
13944 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
13945 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
13946 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
13947 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
13948 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
13949 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
13950 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
13951
13952 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13953 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13954 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13955 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13956 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13957 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13958 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13959 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13960 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13961 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13962 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13963 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13964 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13965 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13966 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13967 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
13968
13969 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
13970 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
13971 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
13972 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
13973 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13974 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13975 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13976 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13977
13978 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13979 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13980 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13981 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
13982 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
13983 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
13984 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
13985 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
13986
13987 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
13988 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
13989 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
13990 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
13991 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
13992 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
13993 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
13994 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
13995 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
13996 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
13997 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
13998 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
13999 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14000 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14001 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14002 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14003
14004 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14005 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14006 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14007 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14008 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14009 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14010 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14011 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14012 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14013 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14014 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14015 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14016 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14017 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14018 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14019 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14020
14021 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14022 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14023 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14024 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14025 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14026 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14027 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14028 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14029 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14030 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14031 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14032 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14033 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14034 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14035 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14036 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14037
14038 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14039 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14040 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14041 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14042 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14043 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14044 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14045 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14046 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14047 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14048 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14049 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14050 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14051 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14052 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14053 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14054
14055 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14056 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14057 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14058 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14059 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14060 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14061 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14062 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14063 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14064 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14065 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14066 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14067 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14068 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14069 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14070 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14071
14072 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14073 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14074 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14075 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14076 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14077 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14078 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14079 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14080 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14081 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14082 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14083 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14084 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14085 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14086 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14087 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14088
14089 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14090 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14091 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14092 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14093 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14094 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14095 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14096 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14097 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14098 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14099 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14100 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14101 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14102 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14103 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14104 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14105
14106 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14107 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14108 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14109 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14110 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14111 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14112 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14113 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14114 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14115 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14116 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14117 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14118 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14119 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14120 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14121 /* 0xff */ IEMOP_X4(iemOp_ud0),
14122};
14123AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
14124
14125/** @} */
14126
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette