VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@102010

Last change on this file since 102010 was 101984, checked in by vboxsync, 16 months ago

VMM/IEM: Added a flush mask for guest register shadows to the IEM_MC_DEFER_TO_CIMPL_X_RET macros to better manage register optimizations when recompiling to native code. bugref:10371

/* $Id: IEMAllInstTwoByte0f.cpp.h 101984 2023-11-08 15:56:18Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
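
/* Usage sketch: an opcode handler typically just states its mnemonic and
   dispatches to the worker above together with the matching assembly-level
   helper.  The handler below is illustrative only; the IEMOP_MNEMONIC2
   arguments and helper name are assumptions, not necessarily what this file
   uses further down:

        FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
        {
            IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, 0);
            return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
        }
 */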


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
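
/* For contrast with the worker above: a minimal sketch (an assumption, not
   the actual VirtualBox helper) of what a stateless PFNIEMAIMPLMEDIAOPTF2U64
   implementation receives -- just the two operand pointers, no FXSAVE state:

        IEM_DECL_IMPL_DEF(void, iemAImpl_pand_u64,(uint64_t *puDst, uint64_t const *puSrc))
        {
            *puDst &= *puSrc; /* PAND: bitwise AND of the two 64-bit MMX operands. */
        }
 */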


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
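
/* A minimal sketch (assumed names and signature) of the 128-bit stateless
   worker shape that PFNIEMAIMPLMEDIAOPTF2U128 describes; RTUINT128U exposes
   the value as two 64-bit halves:

        IEM_DECL_IMPL_DEF(void, iemAImpl_pand_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc))
        {
            puDst->au64[0] &= puSrc->au64[0]; /* low qword */
            puDst->au64[1] &= puSrc->au64[1]; /* high qword */
        }
 */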


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
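
/* Illustrative dispatch for the LowLow pattern (handler/helper names are a
   guess): PUNPCKLBW only needs the low 32 bits of the source, hence the
   IEM_MC_FETCH_MEM_U32_ZX_U64 above.  Worked example: with mm1 holding
   0x8877665544332211 and a 32-bit source of 0xBBAA9988, the interleaved
   result is mm1 = 0xBB44AA3399228811.

        FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
        {
            IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS, 0);
            return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
        }
 */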


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where either the low 64 bits or the full
 * 128 bits may be read for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access where either the low 64 bits or the full
 * 128 bits may be read for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
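
/* Worked example for the HighHigh pattern (PUNPCKHBW semantics): the byte
   lanes of both high qwords are interleaved, so with mm1=0x8877665544332211
   and mm2/m64=0xFFEEDDCCBBAA9988 the result is mm1=0xFF88EE77DD66CC55. */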


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where either the full 128 bits or only the upper
 * 64 bits may be read.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
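
/* Usage sketch (handler/helper names are assumptions): packed FP arithmetic
   such as ADDPS routes through the worker above.  Note the ordering it
   implements: the helper writes into the IEMSSERESULT local, which
   IEM_MC_STORE_SSE_RESULT commits to xmm1 before any pending SIMD FP
   exception is raised:

        FNIEMOP_DEF(iemOp_addps_Vps_Wps)
        {
            IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
            return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
        }
 */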


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * The 32-bit memory operand is fetched without an alignment check.
 * Exceptions type 3. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
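
/* Usage sketch (names assumed): scalar variants such as ADDSS pair the full
   xmm1 with a single 32-bit float source, matching the R32 fetch above:

        FNIEMOP_DEF(iemOp_addss_Vss_Wss)
        {
            IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
            return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
        }
 */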


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * The 64-bit memory operand is fetched without an alignment check.
 * Exceptions type 3. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where either the full 128 bits or only the upper
 * 64 bits may be read.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
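
/* Usage sketch (names assumed): the SSE3 horizontal FP ops are the natural
   users of this worker, e.g.:

        FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
        {
            IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
            return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
        }
 */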


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /4 and /5. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
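
/* Worked example of the dispatch above: for ModR/M byte 0xC8 the reg field
   is (0xC8 >> 3) & 7 = 1, so g_apfnGroup6[1] = iemOp_Grp6_str is called;
   mod=3 then selects the register form inside that handler. */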


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used; if a hypercall
       isn't handled, GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        /** @todo testcase: test prefixes and exceptions. currently not checking for the
         *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
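
/* Note on the second IEM_MC_DEFER_TO_CIMPL_0_RET argument above: per the
   change that introduced it, this is a flush mask for guest register shadows
   used when recompiling to native code.  XGETBV dirties RAX and RDX, so both
   GPR shadows are flagged; e.g. RT_BIT_64(kIemNativeGstReg_GprFirst +
   X86_GREG_xAX) marks the RAX shadow stale. */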


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        /** @todo testcase: test prefixes and exceptions. currently not checking for the
         *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumption
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used; if a hypercall
       isn't handled, GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif
1677
1678
1679/** Opcode 0x0f 0x01 /4. */
1680FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1681{
1682 IEMOP_MNEMONIC(smsw, "smsw");
1683 IEMOP_HLP_MIN_286();
1684 if (IEM_IS_MODRM_REG_MODE(bRm))
1685 {
1686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1687 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
1688 iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1689 }
1690
1691 /* Ignore operand size here, memory refs are always 16-bit. */
1692 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1693 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1696 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1697 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
1698 IEM_MC_END();
1699}
1700
1701
1702/** Opcode 0x0f 0x01 /6. */
1703FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1704{
1705 /* The operand size is effectively ignored; everything is 16-bit and only
1706 the lower 4 bits (PE, MP, EM, TS) are used. */
1707 IEMOP_MNEMONIC(lmsw, "lmsw");
1708 IEMOP_HLP_MIN_286();
1709 if (IEM_IS_MODRM_REG_MODE(bRm))
1710 {
1711 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1713 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1714 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1715 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1716 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1717 IEM_MC_END();
1718 }
1719 else
1720 {
1721 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1722 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1723 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1726 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1727 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1728 IEM_MC_END();
1729 }
1730}
1731
1732
1733/** Opcode 0x0f 0x01 /7. */
1734FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1735{
1736 IEMOP_MNEMONIC(invlpg, "invlpg");
1737 IEMOP_HLP_MIN_486();
1738 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
1739 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1742 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpg, GCPtrEffDst);
1743 IEM_MC_END();
1744}
1745
1746
1747/** Opcode 0x0f 0x01 0xf8. */
1748FNIEMOP_DEF(iemOp_Grp7_swapgs)
1749{
1750 IEMOP_MNEMONIC(swapgs, "swapgs");
1751 IEMOP_HLP_ONLY_64BIT();
1752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
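/* SWAPGS only exchanges the hidden GS base with the IA32_KERNEL_GS_BASE MSR,
   so the GS segment base is the only guest register shadow needing a flush. */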
1753 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
1754}
1755
1756
1757/** Opcode 0x0f 0x01 0xf9. */
1758FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1759{
1760 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
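/* RDTSCP returns the TSC in EDX:EAX and IA32_TSC_AUX in ECX, hence the three
   GPRs in the guest register shadow flush mask below. */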
1762 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
1763 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
1764 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
1765 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
1766 iemCImpl_rdtscp);
1767}
1768
1769
1770/**
1771 * Group 7 jump table, memory variant.
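* Indexed by the ModR/M reg field.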
1772 */
1773IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1774{
1775 iemOp_Grp7_sgdt,
1776 iemOp_Grp7_sidt,
1777 iemOp_Grp7_lgdt,
1778 iemOp_Grp7_lidt,
1779 iemOp_Grp7_smsw,
1780 iemOp_InvalidWithRM,
1781 iemOp_Grp7_lmsw,
1782 iemOp_Grp7_invlpg
1783};
1784
1785
1786/** Opcode 0x0f 0x01. */
1787FNIEMOP_DEF(iemOp_Grp7)
1788{
1789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1790 if (IEM_IS_MODRM_MEM_MODE(bRm))
1791 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1792
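/* Register form: the reg field picks a sub-group and, where applicable, the
   r/m field selects the individual fixed-encoding instruction. */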
1793 switch (IEM_GET_MODRM_REG_8(bRm))
1794 {
1795 case 0:
1796 switch (IEM_GET_MODRM_RM_8(bRm))
1797 {
1798 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1799 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1800 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1801 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1802 }
1803 IEMOP_RAISE_INVALID_OPCODE_RET();
1804
1805 case 1:
1806 switch (IEM_GET_MODRM_RM_8(bRm))
1807 {
1808 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1809 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1810 }
1811 IEMOP_RAISE_INVALID_OPCODE_RET();
1812
1813 case 2:
1814 switch (IEM_GET_MODRM_RM_8(bRm))
1815 {
1816 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1817 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1818 }
1819 IEMOP_RAISE_INVALID_OPCODE_RET();
1820
1821 case 3:
1822 switch (IEM_GET_MODRM_RM_8(bRm))
1823 {
1824 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1825 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1826 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1827 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1828 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1829 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1830 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1831 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1832 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1833 }
1834
1835 case 4:
1836 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1837
1838 case 5:
1839 IEMOP_RAISE_INVALID_OPCODE_RET();
1840
1841 case 6:
1842 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1843
1844 case 7:
1845 switch (IEM_GET_MODRM_RM_8(bRm))
1846 {
1847 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1848 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1849 }
1850 IEMOP_RAISE_INVALID_OPCODE_RET();
1851
1852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1853 }
1854}
1855
1856 /** Common worker for LAR (opcode 0x0f 0x02) and LSL (opcode 0x0f 0x03). */
1857FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1858{
1859 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1861
1862 if (IEM_IS_MODRM_REG_MODE(bRm))
1863 {
1864 switch (pVCpu->iem.s.enmEffOpSize)
1865 {
1866 case IEMMODE_16BIT:
1867 IEM_MC_BEGIN(3, 0, 0, 0);
1868 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1869 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1870 IEM_MC_ARG(uint16_t, u16Sel, 1);
1871 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1872
1873 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1874 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1875 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_REG(pVCpu, bRm));
1876 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1877
1878 IEM_MC_END();
1879 break;
1880
1881 case IEMMODE_32BIT:
1882 case IEMMODE_64BIT:
1883 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
1884 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1885 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1886 IEM_MC_ARG(uint16_t, u16Sel, 1);
1887 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1888
1889 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1890 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1891 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_REG(pVCpu, bRm));
1892 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1893
1894 IEM_MC_END();
1895 break;
1896
1897 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1898 }
1899 }
1900 else
1901 {
1902 switch (pVCpu->iem.s.enmEffOpSize)
1903 {
1904 case IEMMODE_16BIT:
1905 IEM_MC_BEGIN(3, 1, 0, 0);
1906 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1907 IEM_MC_ARG(uint16_t, u16Sel, 1);
1908 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1910
1911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1912 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1913
1914 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1915 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1916 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_REG(pVCpu, bRm));
1917 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1918
1919 IEM_MC_END();
1920 break;
1921
1922 case IEMMODE_32BIT:
1923 case IEMMODE_64BIT:
1924 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
1925 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1926 IEM_MC_ARG(uint16_t, u16Sel, 1);
1927 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1928 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1929
1930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1931 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1932/** @todo testcase: make sure it's a 16-bit read. */
1933
1934 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1935 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1936 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_REG(pVCpu, bRm));
1937 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1938
1939 IEM_MC_END();
1940 break;
1941
1942 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1943 }
1944 }
1945}
1946
1947
1948
1949/** Opcode 0x0f 0x02. */
1950FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1951{
1952 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1953 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1954}
1955
1956
1957/** Opcode 0x0f 0x03. */
1958FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1959{
1960 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1961 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1962}
1963
1964
1965/** Opcode 0x0f 0x05. */
1966FNIEMOP_DEF(iemOp_syscall)
1967{
1968 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1970 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1971 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1972 iemCImpl_syscall);
1973}
1974
1975
1976/** Opcode 0x0f 0x06. */
1977FNIEMOP_DEF(iemOp_clts)
1978{
1979 IEMOP_MNEMONIC(clts, "clts");
1980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1981 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clts);
1982}
1983
1984
1985/** Opcode 0x0f 0x07. */
1986FNIEMOP_DEF(iemOp_sysret)
1987{
1988 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1990 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1991 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1992 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1993}
1994
1995
1996/** Opcode 0x0f 0x08. */
1997FNIEMOP_DEF(iemOp_invd)
1998{
1999 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
2000 IEMOP_HLP_MIN_486();
2001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2002 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
2003}
2004
2005
2006/** Opcode 0x0f 0x09. */
2007FNIEMOP_DEF(iemOp_wbinvd)
2008{
2009 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
2010 IEMOP_HLP_MIN_486();
2011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2012 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
2013}
2014
2015
2016/** Opcode 0x0f 0x0b. */
2017FNIEMOP_DEF(iemOp_ud2)
2018{
2019 IEMOP_MNEMONIC(ud2, "ud2");
2020 IEMOP_RAISE_INVALID_OPCODE_RET();
2021}
2022
2023/** Opcode 0x0f 0x0d. */
2024FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
2025{
2026 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
2027 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
2028 {
2029 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
2030 IEMOP_RAISE_INVALID_OPCODE_RET();
2031 }
2032
2033 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2034 if (IEM_IS_MODRM_REG_MODE(bRm))
2035 {
2036 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
2037 IEMOP_RAISE_INVALID_OPCODE_RET();
2038 }
2039
2040 switch (IEM_GET_MODRM_REG_8(bRm))
2041 {
2042 case 2: /* Aliased to /0 for the time being. */
2043 case 4: /* Aliased to /0 for the time being. */
2044 case 5: /* Aliased to /0 for the time being. */
2045 case 6: /* Aliased to /0 for the time being. */
2046 case 7: /* Aliased to /0 for the time being. */
2047 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2048 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2049 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2051 }
2052
2053 IEM_MC_BEGIN(0, 1, 0, 0);
2054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2057 /* Currently a NOP. */
2058 NOREF(GCPtrEffSrc);
2059 IEM_MC_ADVANCE_RIP_AND_FINISH();
2060 IEM_MC_END();
2061}
2062
2063
2064/** Opcode 0x0f 0x0e. */
2065FNIEMOP_DEF(iemOp_femms)
2066{
2067 IEMOP_MNEMONIC(femms, "femms");
2068
2069 IEM_MC_BEGIN(0, 0, 0, 0);
2070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2071 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2072 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2073 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2074 IEM_MC_FPU_FROM_MMX_MODE();
2075 IEM_MC_ADVANCE_RIP_AND_FINISH();
2076 IEM_MC_END();
2077}
2078
2079
2080/** Opcode 0x0f 0x0f. */
2081FNIEMOP_DEF(iemOp_3Dnow)
2082{
2083 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2084 {
2085 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2086 IEMOP_RAISE_INVALID_OPCODE_RET();
2087 }
2088
2089#ifdef IEM_WITH_3DNOW
2090 /* This is pretty sparse, use switch instead of table. */
2091 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2092 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2093#else
2094 IEMOP_BITCH_ABOUT_STUB();
2095 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2096#endif
2097}
2098
2099
2100/**
2101 * @opcode 0x10
2102 * @oppfx none
2103 * @opcpuid sse
2104 * @opgroup og_sse_simdfp_datamove
2105 * @opxcpttype 4UA
2106 * @optest op1=1 op2=2 -> op1=2
2107 * @optest op1=0 op2=-22 -> op1=-22
2108 */
2109FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2110{
2111 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2113 if (IEM_IS_MODRM_REG_MODE(bRm))
2114 {
2115 /*
2116 * XMM128, XMM128.
2117 */
2118 IEM_MC_BEGIN(0, 0, 0, 0);
2119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2120 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2121 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2122 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2123 IEM_GET_MODRM_RM(pVCpu, bRm));
2124 IEM_MC_ADVANCE_RIP_AND_FINISH();
2125 IEM_MC_END();
2126 }
2127 else
2128 {
2129 /*
2130 * XMM128, [mem128].
2131 */
2132 IEM_MC_BEGIN(0, 2, 0, 0);
2133 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2135
2136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2138 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2139 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2140
2141 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2142 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2143
2144 IEM_MC_ADVANCE_RIP_AND_FINISH();
2145 IEM_MC_END();
2146 }
2148 }
2149
2150
2151/**
2152 * @opcode 0x10
2153 * @oppfx 0x66
2154 * @opcpuid sse2
2155 * @opgroup og_sse2_pcksclr_datamove
2156 * @opxcpttype 4UA
2157 * @optest op1=1 op2=2 -> op1=2
2158 * @optest op1=0 op2=-42 -> op1=-42
2159 */
2160FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2161{
2162 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2164 if (IEM_IS_MODRM_REG_MODE(bRm))
2165 {
2166 /*
2167 * XMM128, XMM128.
2168 */
2169 IEM_MC_BEGIN(0, 0, 0, 0);
2170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2171 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2172 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2173 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2174 IEM_GET_MODRM_RM(pVCpu, bRm));
2175 IEM_MC_ADVANCE_RIP_AND_FINISH();
2176 IEM_MC_END();
2177 }
2178 else
2179 {
2180 /*
2181 * XMM128, [mem128].
2182 */
2183 IEM_MC_BEGIN(0, 2, 0, 0);
2184 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2186
2187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2189 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2190 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2191
2192 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2193 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2194
2195 IEM_MC_ADVANCE_RIP_AND_FINISH();
2196 IEM_MC_END();
2197 }
2198}
2199
2200
2201/**
2202 * @opcode 0x10
2203 * @oppfx 0xf3
2204 * @opcpuid sse
2205 * @opgroup og_sse_simdfp_datamove
2206 * @opxcpttype 5
2207 * @optest op1=1 op2=2 -> op1=2
2208 * @optest op1=0 op2=-22 -> op1=-22
2209 */
2210FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2211{
2212 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2213 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2214 if (IEM_IS_MODRM_REG_MODE(bRm))
2215 {
2216 /*
2217 * XMM32, XMM32.
2218 */
2219 IEM_MC_BEGIN(0, 1, 0, 0);
2220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2221 IEM_MC_LOCAL(uint32_t, uSrc);
2222
2223 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2224 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
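/* Register form: only the low dword is copied and bits 127:32 of the
   destination are preserved (the memory form below zero-extends instead). */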
2225 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
2226 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2227
2228 IEM_MC_ADVANCE_RIP_AND_FINISH();
2229 IEM_MC_END();
2230 }
2231 else
2232 {
2233 /*
2234 * XMM128, [mem32].
2235 */
2236 IEM_MC_BEGIN(0, 2, 0, 0);
2237 IEM_MC_LOCAL(uint32_t, uSrc);
2238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2239
2240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2242 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2243 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2244
2245 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2246 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2247
2248 IEM_MC_ADVANCE_RIP_AND_FINISH();
2249 IEM_MC_END();
2250 }
2251}
2252
2253
2254/**
2255 * @opcode 0x10
2256 * @oppfx 0xf2
2257 * @opcpuid sse2
2258 * @opgroup og_sse2_pcksclr_datamove
2259 * @opxcpttype 5
2260 * @optest op1=1 op2=2 -> op1=2
2261 * @optest op1=0 op2=-42 -> op1=-42
2262 */
2263FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2264{
2265 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2267 if (IEM_IS_MODRM_REG_MODE(bRm))
2268 {
2269 /*
2270 * XMM64, XMM64.
2271 */
2272 IEM_MC_BEGIN(0, 1, 0, 0);
2273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2274 IEM_MC_LOCAL(uint64_t, uSrc);
2275
2276 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2277 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
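/* Like MOVSS: the register form writes only the low qword and preserves the
   rest, while the memory form zero-extends the destination to 128 bits. */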
2278 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2279 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2280
2281 IEM_MC_ADVANCE_RIP_AND_FINISH();
2282 IEM_MC_END();
2283 }
2284 else
2285 {
2286 /*
2287 * XMM128, [mem64].
2288 */
2289 IEM_MC_BEGIN(0, 2, 0, 0);
2290 IEM_MC_LOCAL(uint64_t, uSrc);
2291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2292
2293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2295 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2296 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2297
2298 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2299 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2300
2301 IEM_MC_ADVANCE_RIP_AND_FINISH();
2302 IEM_MC_END();
2303 }
2304}
2305
2306
2307/**
2308 * @opcode 0x11
2309 * @oppfx none
2310 * @opcpuid sse
2311 * @opgroup og_sse_simdfp_datamove
2312 * @opxcpttype 4UA
2313 * @optest op1=1 op2=2 -> op1=2
2314 * @optest op1=0 op2=-42 -> op1=-42
2315 */
2316FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2317{
2318 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2320 if (IEM_IS_MODRM_REG_MODE(bRm))
2321 {
2322 /*
2323 * XMM128, XMM128.
2324 */
2325 IEM_MC_BEGIN(0, 0, 0, 0);
2326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2327 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2328 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2329 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2330 IEM_GET_MODRM_REG(pVCpu, bRm));
2331 IEM_MC_ADVANCE_RIP_AND_FINISH();
2332 IEM_MC_END();
2333 }
2334 else
2335 {
2336 /*
2337 * [mem128], XMM128.
2338 */
2339 IEM_MC_BEGIN(0, 2, 0, 0);
2340 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2342
2343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2345 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2346 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2347
2348 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2349 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2350
2351 IEM_MC_ADVANCE_RIP_AND_FINISH();
2352 IEM_MC_END();
2353 }
2354}
2355
2356
2357/**
2358 * @opcode 0x11
2359 * @oppfx 0x66
2360 * @opcpuid sse2
2361 * @opgroup og_sse2_pcksclr_datamove
2362 * @opxcpttype 4UA
2363 * @optest op1=1 op2=2 -> op1=2
2364 * @optest op1=0 op2=-42 -> op1=-42
2365 */
2366FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2367{
2368 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2369 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2370 if (IEM_IS_MODRM_REG_MODE(bRm))
2371 {
2372 /*
2373 * XMM128, XMM128.
2374 */
2375 IEM_MC_BEGIN(0, 0, 0, 0);
2376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2377 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2378 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2379 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2380 IEM_GET_MODRM_REG(pVCpu, bRm));
2381 IEM_MC_ADVANCE_RIP_AND_FINISH();
2382 IEM_MC_END();
2383 }
2384 else
2385 {
2386 /*
2387 * [mem128], XMM128.
2388 */
2389 IEM_MC_BEGIN(0, 2, 0, 0);
2390 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2392
2393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2395 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2396 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2397
2398 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2399 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2400
2401 IEM_MC_ADVANCE_RIP_AND_FINISH();
2402 IEM_MC_END();
2403 }
2404}
2405
2406
2407/**
2408 * @opcode 0x11
2409 * @oppfx 0xf3
2410 * @opcpuid sse
2411 * @opgroup og_sse_simdfp_datamove
2412 * @opxcpttype 5
2413 * @optest op1=1 op2=2 -> op1=2
2414 * @optest op1=0 op2=-22 -> op1=-22
2415 */
2416FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2417{
2418 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2420 if (IEM_IS_MODRM_REG_MODE(bRm))
2421 {
2422 /*
2423 * XMM32, XMM32.
2424 */
2425 IEM_MC_BEGIN(0, 1, 0, 0);
2426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2427 IEM_MC_LOCAL(uint32_t, uSrc);
2428
2429 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2430 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2431 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2432 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2433
2434 IEM_MC_ADVANCE_RIP_AND_FINISH();
2435 IEM_MC_END();
2436 }
2437 else
2438 {
2439 /*
2440 * [mem32], XMM32.
2441 */
2442 IEM_MC_BEGIN(0, 2, 0, 0);
2443 IEM_MC_LOCAL(uint32_t, uSrc);
2444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2445
2446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2448 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2449 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2450
2451 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2452 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2453
2454 IEM_MC_ADVANCE_RIP_AND_FINISH();
2455 IEM_MC_END();
2456 }
2457}
2458
2459
2460/**
2461 * @opcode 0x11
2462 * @oppfx 0xf2
2463 * @opcpuid sse2
2464 * @opgroup og_sse2_pcksclr_datamove
2465 * @opxcpttype 5
2466 * @optest op1=1 op2=2 -> op1=2
2467 * @optest op1=0 op2=-42 -> op1=-42
2468 */
2469FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2470{
2471 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2473 if (IEM_IS_MODRM_REG_MODE(bRm))
2474 {
2475 /*
2476 * XMM64, XMM64.
2477 */
2478 IEM_MC_BEGIN(0, 1, 0, 0);
2479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2480 IEM_MC_LOCAL(uint64_t, uSrc);
2481
2482 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2483 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2484 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2485 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2486
2487 IEM_MC_ADVANCE_RIP_AND_FINISH();
2488 IEM_MC_END();
2489 }
2490 else
2491 {
2492 /*
2493 * [mem64], XMM64.
2494 */
2495 IEM_MC_BEGIN(0, 2, 0, 0);
2496 IEM_MC_LOCAL(uint64_t, uSrc);
2497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2498
2499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2501 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2502 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2503
2504 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2505 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2506
2507 IEM_MC_ADVANCE_RIP_AND_FINISH();
2508 IEM_MC_END();
2509 }
2510}
2511
2512
2513FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2514{
2515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2516 if (IEM_IS_MODRM_REG_MODE(bRm))
2517 {
2518 /**
2519 * @opcode 0x12
2520 * @opcodesub 11 mr/reg
2521 * @oppfx none
2522 * @opcpuid sse
2523 * @opgroup og_sse_simdfp_datamove
2524 * @opxcpttype 5
2525 * @optest op1=1 op2=2 -> op1=2
2526 * @optest op1=0 op2=-42 -> op1=-42
2527 */
2528 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2529
2530 IEM_MC_BEGIN(0, 1, 0, 0);
2531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2532 IEM_MC_LOCAL(uint64_t, uSrc);
2533
2534 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2535 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
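/* MOVHLPS: the high qword of the source goes into the low qword of the
   destination; the destination's high qword is preserved. */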
2536 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2537 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2538
2539 IEM_MC_ADVANCE_RIP_AND_FINISH();
2540 IEM_MC_END();
2541 }
2542 else
2543 {
2544 /**
2545 * @opdone
2546 * @opcode 0x12
2547 * @opcodesub !11 mr/reg
2548 * @oppfx none
2549 * @opcpuid sse
2550 * @opgroup og_sse_simdfp_datamove
2551 * @opxcpttype 5
2552 * @optest op1=1 op2=2 -> op1=2
2553 * @optest op1=0 op2=-42 -> op1=-42
2554 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2555 */
2556 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2557
2558 IEM_MC_BEGIN(0, 2, 0, 0);
2559 IEM_MC_LOCAL(uint64_t, uSrc);
2560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2561
2562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2564 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2565 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2566
2567 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2568 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2569
2570 IEM_MC_ADVANCE_RIP_AND_FINISH();
2571 IEM_MC_END();
2572 }
2573}
2574
2575
2576/**
2577 * @opcode 0x12
2578 * @opcodesub !11 mr/reg
2579 * @oppfx 0x66
2580 * @opcpuid sse2
2581 * @opgroup og_sse2_pcksclr_datamove
2582 * @opxcpttype 5
2583 * @optest op1=1 op2=2 -> op1=2
2584 * @optest op1=0 op2=-42 -> op1=-42
2585 */
2586FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2587{
2588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2589 if (IEM_IS_MODRM_MEM_MODE(bRm))
2590 {
2591 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2592
2593 IEM_MC_BEGIN(0, 2, 0, 0);
2594 IEM_MC_LOCAL(uint64_t, uSrc);
2595 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2596
2597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2599 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2600 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2601
2602 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2603 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2604
2605 IEM_MC_ADVANCE_RIP_AND_FINISH();
2606 IEM_MC_END();
2607 }
2608
2609 /**
2610 * @opdone
2611 * @opmnemonic ud660f12m3
2612 * @opcode 0x12
2613 * @opcodesub 11 mr/reg
2614 * @oppfx 0x66
2615 * @opunused immediate
2616 * @opcpuid sse
2617 * @optest ->
2618 */
2619 else
2620 IEMOP_RAISE_INVALID_OPCODE_RET();
2621}
2622
2623
2624/**
2625 * @opcode 0x12
2626 * @oppfx 0xf3
2627 * @opcpuid sse3
2628 * @opgroup og_sse3_pcksclr_datamove
2629 * @opxcpttype 4
2630 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2631 * op1=0x00000002000000020000000100000001
2632 */
2633FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2634{
2635 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2636 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2637 if (IEM_IS_MODRM_REG_MODE(bRm))
2638 {
2639 /*
2640 * XMM, XMM.
2641 */
2642 IEM_MC_BEGIN(0, 1, 0, 0);
2643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2644 IEM_MC_LOCAL(RTUINT128U, uSrc);
2645
2646 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2647 IEM_MC_PREPARE_SSE_USAGE();
2648
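/* Duplicate the even dwords: dst[0] = dst[1] = src[0], dst[2] = dst[3] = src[2]. */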
2649 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2650 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2651 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2652 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2653 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2654
2655 IEM_MC_ADVANCE_RIP_AND_FINISH();
2656 IEM_MC_END();
2657 }
2658 else
2659 {
2660 /*
2661 * XMM, [mem128].
2662 */
2663 IEM_MC_BEGIN(0, 2, 0, 0);
2664 IEM_MC_LOCAL(RTUINT128U, uSrc);
2665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2666
2667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2669 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2670 IEM_MC_PREPARE_SSE_USAGE();
2671
2672 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2673 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2674 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2675 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2676 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2677
2678 IEM_MC_ADVANCE_RIP_AND_FINISH();
2679 IEM_MC_END();
2680 }
2681}
2682
2683
2684/**
2685 * @opcode 0x12
2686 * @oppfx 0xf2
2687 * @opcpuid sse3
2688 * @opgroup og_sse3_pcksclr_datamove
2689 * @opxcpttype 5
2690 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2691 * op1=0x22222222111111112222222211111111
2692 */
2693FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2694{
2695 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2696 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2697 if (IEM_IS_MODRM_REG_MODE(bRm))
2698 {
2699 /*
2700 * XMM128, XMM64.
2701 */
2702 IEM_MC_BEGIN(0, 1, 0, 0);
2703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2704 IEM_MC_LOCAL(uint64_t, uSrc);
2705
2706 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2707 IEM_MC_PREPARE_SSE_USAGE();
2708
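/* Broadcast the low qword of the source into both halves of the destination. */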
2709 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2710 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2711 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2712
2713 IEM_MC_ADVANCE_RIP_AND_FINISH();
2714 IEM_MC_END();
2715 }
2716 else
2717 {
2718 /*
2719 * XMM128, [mem64].
2720 */
2721 IEM_MC_BEGIN(0, 2, 0, 0);
2722 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2723 IEM_MC_LOCAL(uint64_t, uSrc);
2724
2725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2727 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2728 IEM_MC_PREPARE_SSE_USAGE();
2729
2730 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2731 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2732 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2733
2734 IEM_MC_ADVANCE_RIP_AND_FINISH();
2735 IEM_MC_END();
2736 }
2737}
2738
2739
2740/**
2741 * @opcode 0x13
2742 * @opcodesub !11 mr/reg
2743 * @oppfx none
2744 * @opcpuid sse
2745 * @opgroup og_sse_simdfp_datamove
2746 * @opxcpttype 5
2747 * @optest op1=1 op2=2 -> op1=2
2748 * @optest op1=0 op2=-42 -> op1=-42
2749 */
2750FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2751{
2752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2753 if (IEM_IS_MODRM_MEM_MODE(bRm))
2754 {
2755 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2756
2757 IEM_MC_BEGIN(0, 2, 0, 0);
2758 IEM_MC_LOCAL(uint64_t, uSrc);
2759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2760
2761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2763 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2764 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2765
2766 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2767 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2768
2769 IEM_MC_ADVANCE_RIP_AND_FINISH();
2770 IEM_MC_END();
2771 }
2772
2773 /**
2774 * @opdone
2775 * @opmnemonic ud0f13m3
2776 * @opcode 0x13
2777 * @opcodesub 11 mr/reg
2778 * @oppfx none
2779 * @opunused immediate
2780 * @opcpuid sse
2781 * @optest ->
2782 */
2783 else
2784 IEMOP_RAISE_INVALID_OPCODE_RET();
2785}
2786
2787
2788/**
2789 * @opcode 0x13
2790 * @opcodesub !11 mr/reg
2791 * @oppfx 0x66
2792 * @opcpuid sse2
2793 * @opgroup og_sse2_pcksclr_datamove
2794 * @opxcpttype 5
2795 * @optest op1=1 op2=2 -> op1=2
2796 * @optest op1=0 op2=-42 -> op1=-42
2797 */
2798FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2799{
2800 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2801 if (IEM_IS_MODRM_MEM_MODE(bRm))
2802 {
2803 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2804
2805 IEM_MC_BEGIN(0, 2, 0, 0);
2806 IEM_MC_LOCAL(uint64_t, uSrc);
2807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2808
2809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2811 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2812 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2813
2814 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2815 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2816
2817 IEM_MC_ADVANCE_RIP_AND_FINISH();
2818 IEM_MC_END();
2819 }
2820
2821 /**
2822 * @opdone
2823 * @opmnemonic ud660f13m3
2824 * @opcode 0x13
2825 * @opcodesub 11 mr/reg
2826 * @oppfx 0x66
2827 * @opunused immediate
2828 * @opcpuid sse
2829 * @optest ->
2830 */
2831 else
2832 IEMOP_RAISE_INVALID_OPCODE_RET();
2833}
2834
2835
2836/**
2837 * @opmnemonic udf30f13
2838 * @opcode 0x13
2839 * @oppfx 0xf3
2840 * @opunused intel-modrm
2841 * @opcpuid sse
2842 * @optest ->
2843 * @opdone
2844 */
2845
2846/**
2847 * @opmnemonic udf20f13
2848 * @opcode 0x13
2849 * @oppfx 0xf2
2850 * @opunused intel-modrm
2851 * @opcpuid sse
2852 * @optest ->
2853 * @opdone
2854 */
2855
2856 /** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2857FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2858{
2859 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2860 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2861}
2862
2863
2864/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2865FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2866{
2867 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2868 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2869}
2870
2871
2872/**
2873 * @opdone
2874 * @opmnemonic udf30f14
2875 * @opcode 0x14
2876 * @oppfx 0xf3
2877 * @opunused intel-modrm
2878 * @opcpuid sse
2879 * @optest ->
2880 * @opdone
2881 */
2882
2883/**
2884 * @opmnemonic udf20f14
2885 * @opcode 0x14
2886 * @oppfx 0xf2
2887 * @opunused intel-modrm
2888 * @opcpuid sse
2889 * @optest ->
2890 * @opdone
2891 */
2892
2893/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2894FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2895{
2896 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2897 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2898}
2899
2900
2901/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2902FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2903{
2904 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2905 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2906}
2907
2908
2909/* Opcode 0xf3 0x0f 0x15 - invalid */
2910/* Opcode 0xf2 0x0f 0x15 - invalid */
2911
2912/**
2913 * @opdone
2914 * @opmnemonic udf30f15
2915 * @opcode 0x15
2916 * @oppfx 0xf3
2917 * @opunused intel-modrm
2918 * @opcpuid sse
2919 * @optest ->
2920 * @opdone
2921 */
2922
2923/**
2924 * @opmnemonic udf20f15
2925 * @opcode 0x15
2926 * @oppfx 0xf2
2927 * @opunused intel-modrm
2928 * @opcpuid sse
2929 * @optest ->
2930 * @opdone
2931 */
2932
2933FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2934{
2935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2936 if (IEM_IS_MODRM_REG_MODE(bRm))
2937 {
2938 /**
2939 * @opcode 0x16
2940 * @opcodesub 11 mr/reg
2941 * @oppfx none
2942 * @opcpuid sse
2943 * @opgroup og_sse_simdfp_datamove
2944 * @opxcpttype 5
2945 * @optest op1=1 op2=2 -> op1=2
2946 * @optest op1=0 op2=-42 -> op1=-42
2947 */
2948 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2949
2950 IEM_MC_BEGIN(0, 1, 0, 0);
2951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2952 IEM_MC_LOCAL(uint64_t, uSrc);
2953
2954 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2955 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
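/* MOVLHPS: the low qword of the source goes into the high qword of the
   destination; the destination's low qword is preserved. */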
2956 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2957 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2958
2959 IEM_MC_ADVANCE_RIP_AND_FINISH();
2960 IEM_MC_END();
2961 }
2962 else
2963 {
2964 /**
2965 * @opdone
2966 * @opcode 0x16
2967 * @opcodesub !11 mr/reg
2968 * @oppfx none
2969 * @opcpuid sse
2970 * @opgroup og_sse_simdfp_datamove
2971 * @opxcpttype 5
2972 * @optest op1=1 op2=2 -> op1=2
2973 * @optest op1=0 op2=-42 -> op1=-42
2974 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2975 */
2976 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2977
2978 IEM_MC_BEGIN(0, 2, 0, 0);
2979 IEM_MC_LOCAL(uint64_t, uSrc);
2980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2981
2982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2984 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2985 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2986
2987 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2988 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2989
2990 IEM_MC_ADVANCE_RIP_AND_FINISH();
2991 IEM_MC_END();
2992 }
2993}
2994
2995
2996/**
2997 * @opcode 0x16
2998 * @opcodesub !11 mr/reg
2999 * @oppfx 0x66
3000 * @opcpuid sse2
3001 * @opgroup og_sse2_pcksclr_datamove
3002 * @opxcpttype 5
3003 * @optest op1=1 op2=2 -> op1=2
3004 * @optest op1=0 op2=-42 -> op1=-42
3005 */
3006FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
3007{
3008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3009 if (IEM_IS_MODRM_MEM_MODE(bRm))
3010 {
3011 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3012
3013 IEM_MC_BEGIN(0, 2, 0, 0);
3014 IEM_MC_LOCAL(uint64_t, uSrc);
3015 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3016
3017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3019 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3020 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3021
3022 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3023 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3024
3025 IEM_MC_ADVANCE_RIP_AND_FINISH();
3026 IEM_MC_END();
3027 }
3028
3029 /**
3030 * @opdone
3031 * @opmnemonic ud660f16m3
3032 * @opcode 0x16
3033 * @opcodesub 11 mr/reg
3034 * @oppfx 0x66
3035 * @opunused immediate
3036 * @opcpuid sse
3037 * @optest ->
3038 */
3039 else
3040 IEMOP_RAISE_INVALID_OPCODE_RET();
3041}
3042
3043
3044/**
3045 * @opcode 0x16
3046 * @oppfx 0xf3
3047 * @opcpuid sse3
3048 * @opgroup og_sse3_pcksclr_datamove
3049 * @opxcpttype 4
3050 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3051 * op1=0x00000002000000020000000100000001
3052 */
3053FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3054{
3055 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3057 if (IEM_IS_MODRM_REG_MODE(bRm))
3058 {
3059 /*
3060 * XMM128, XMM128.
3061 */
3062 IEM_MC_BEGIN(0, 1, 0, 0);
3063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3064 IEM_MC_LOCAL(RTUINT128U, uSrc);
3065
3066 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3067 IEM_MC_PREPARE_SSE_USAGE();
3068
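/* Duplicate the odd dwords: dst[0] = dst[1] = src[1], dst[2] = dst[3] = src[3]. */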
3069 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3070 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3071 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3072 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3073 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3074
3075 IEM_MC_ADVANCE_RIP_AND_FINISH();
3076 IEM_MC_END();
3077 }
3078 else
3079 {
3080 /*
3081 * XMM128, [mem128].
3082 */
3083 IEM_MC_BEGIN(0, 2, 0, 0);
3084 IEM_MC_LOCAL(RTUINT128U, uSrc);
3085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3086
3087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3089 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3090 IEM_MC_PREPARE_SSE_USAGE();
3091
3092 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3093 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3094 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3095 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3096 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3097
3098 IEM_MC_ADVANCE_RIP_AND_FINISH();
3099 IEM_MC_END();
3100 }
3101}
3102
3103/**
3104 * @opdone
3105 * @opmnemonic udf20f16
3106 * @opcode 0x16
3107 * @oppfx 0xf2
3108 * @opunused intel-modrm
3109 * @opcpuid sse
3110 * @optest ->
3111 * @opdone
3112 */
3113
3114
3115/**
3116 * @opcode 0x17
3117 * @opcodesub !11 mr/reg
3118 * @oppfx none
3119 * @opcpuid sse
3120 * @opgroup og_sse_simdfp_datamove
3121 * @opxcpttype 5
3122 * @optest op1=1 op2=2 -> op1=2
3123 * @optest op1=0 op2=-42 -> op1=-42
3124 */
3125FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3126{
3127 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3128 if (IEM_IS_MODRM_MEM_MODE(bRm))
3129 {
3130 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3131
3132 IEM_MC_BEGIN(0, 2, 0, 0);
3133 IEM_MC_LOCAL(uint64_t, uSrc);
3134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3135
3136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3138 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3139 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3140
3141 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3142 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3143
3144 IEM_MC_ADVANCE_RIP_AND_FINISH();
3145 IEM_MC_END();
3146 }
3147
3148 /**
3149 * @opdone
3150 * @opmnemonic ud0f17m3
3151 * @opcode 0x17
3152 * @opcodesub 11 mr/reg
3153 * @oppfx none
3154 * @opunused immediate
3155 * @opcpuid sse
3156 * @optest ->
3157 */
3158 else
3159 IEMOP_RAISE_INVALID_OPCODE_RET();
3160}
3161
3162
3163/**
3164 * @opcode 0x17
3165 * @opcodesub !11 mr/reg
3166 * @oppfx 0x66
3167 * @opcpuid sse2
3168 * @opgroup og_sse2_pcksclr_datamove
3169 * @opxcpttype 5
3170 * @optest op1=1 op2=2 -> op1=2
3171 * @optest op1=0 op2=-42 -> op1=-42
3172 */
3173FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3174{
3175 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3176 if (IEM_IS_MODRM_MEM_MODE(bRm))
3177 {
3178 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3179
3180 IEM_MC_BEGIN(0, 2, 0, 0);
3181 IEM_MC_LOCAL(uint64_t, uSrc);
3182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3183
3184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* MOVHPD is an SSE2 instruction. */
3186 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3187 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3188
3189 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3190 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3191
3192 IEM_MC_ADVANCE_RIP_AND_FINISH();
3193 IEM_MC_END();
3194 }
3195
3196 /**
3197 * @opdone
3198 * @opmnemonic ud660f17m3
3199 * @opcode 0x17
3200 * @opcodesub 11 mr/reg
3201 * @oppfx 0x66
3202 * @opunused immediate
3203 * @opcpuid sse
3204 * @optest ->
3205 */
3206 else
3207 IEMOP_RAISE_INVALID_OPCODE_RET();
3208}
3209
3210
3211/**
3212 * @opdone
3213 * @opmnemonic udf30f17
3214 * @opcode 0x17
3215 * @oppfx 0xf3
3216 * @opunused intel-modrm
3217 * @opcpuid sse
3218 * @optest ->
3219 * @opdone
3220 */
3221
3222/**
3223 * @opmnemonic udf20f17
3224 * @opcode 0x17
3225 * @oppfx 0xf2
3226 * @opunused intel-modrm
3227 * @opcpuid sse
3228 * @optest ->
3229 * @opdone
3230 */
3231
3232
3233/** Opcode 0x0f 0x18. */
3234FNIEMOP_DEF(iemOp_prefetch_Grp16)
3235{
3236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3237 if (IEM_IS_MODRM_MEM_MODE(bRm))
3238 {
3239 switch (IEM_GET_MODRM_REG_8(bRm))
3240 {
3241 case 4: /* Aliased to /0 for the time being according to AMD. */
3242 case 5: /* Aliased to /0 for the time being according to AMD. */
3243 case 6: /* Aliased to /0 for the time being according to AMD. */
3244 case 7: /* Aliased to /0 for the time being according to AMD. */
3245 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3246 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3247 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3248 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3249 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3250 }
3251
3252 IEM_MC_BEGIN(0, 1, 0, 0);
3253 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3256 /* Currently a NOP. */
3257 NOREF(GCPtrEffSrc);
3258 IEM_MC_ADVANCE_RIP_AND_FINISH();
3259 IEM_MC_END();
3260 }
3261 else
3262 IEMOP_RAISE_INVALID_OPCODE_RET();
3263}
3264
3265
3266/** Opcode 0x0f 0x19..0x1f. */
3267FNIEMOP_DEF(iemOp_nop_Ev)
3268{
3269 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3271 if (IEM_IS_MODRM_REG_MODE(bRm))
3272 {
3273 IEM_MC_BEGIN(0, 0, 0, 0);
3274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3275 IEM_MC_ADVANCE_RIP_AND_FINISH();
3276 IEM_MC_END();
3277 }
3278 else
3279 {
3280 IEM_MC_BEGIN(0, 1, 0, 0);
3281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3284 /* Currently a NOP. */
3285 NOREF(GCPtrEffSrc);
3286 IEM_MC_ADVANCE_RIP_AND_FINISH();
3287 IEM_MC_END();
3288 }
3289}
3290
3291
3292/** Opcode 0x0f 0x20. */
3293FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3294{
3295 /* mod is ignored, as are operand-size overrides. */
3296/** @todo testcase: check memory encoding. */
3297 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3298 IEMOP_HLP_MIN_386();
3299 if (IEM_IS_64BIT_CODE(pVCpu))
3300 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3301 else
3302 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3303
3304 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3305 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3306 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3307 {
3308 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3309 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3310 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3311 iCrReg |= 8;
3312 }
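/* Only CR0, CR2, CR3, CR4 and CR8 exist; everything else raises #UD. */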
3313 switch (iCrReg)
3314 {
3315 case 0: case 2: case 3: case 4: case 8:
3316 break;
3317 default:
3318 IEMOP_RAISE_INVALID_OPCODE_RET();
3319 }
3320 IEMOP_HLP_DONE_DECODING();
3321
3322 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3323 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3324 iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3325}
3326
3327
3328/** Opcode 0x0f 0x21. */
3329FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3330{
3331/** @todo testcase: check memory encoding. */
3332 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3333 IEMOP_HLP_MIN_386();
3334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3336 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3337 IEMOP_RAISE_INVALID_OPCODE_RET();
3338 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3339 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3340 iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3341}
3342
3343
3344/** Opcode 0x0f 0x22. */
3345FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3346{
3347 /* mod is ignored, as are operand-size overrides. */
3348 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3349 IEMOP_HLP_MIN_386();
3350 if (IEM_IS_64BIT_CODE(pVCpu))
3351 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3352 else
3353 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3354
3355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3356 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3357 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3358 {
3359 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3360 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3361 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3362 iCrReg |= 8;
3363 }
3364 switch (iCrReg)
3365 {
3366 case 0: case 2: case 3: case 4: case 8:
3367 break;
3368 default:
3369 IEMOP_RAISE_INVALID_OPCODE_RET();
3370 }
3371 IEMOP_HLP_DONE_DECODING();
3372
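/* Writes to CR2, CR3 and CR8 never change the processor mode, so only the
   CR0 and CR4 cases need IEM_CIMPL_F_MODE. */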
3373 if (iCrReg & (2 | 8))
3374 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
3375 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3376 else
3377 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3378 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3379}
3380
3381
3382/** Opcode 0x0f 0x23. */
3383FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3384{
3385 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3386 IEMOP_HLP_MIN_386();
3387 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3389 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3390 IEMOP_RAISE_INVALID_OPCODE_RET();
3391 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3392 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3393}
3394
3395
3396/** Opcode 0x0f 0x24. */
3397FNIEMOP_DEF(iemOp_mov_Rd_Td)
3398{
3399 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3400 IEMOP_HLP_MIN_386();
3401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3403 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3404 IEMOP_RAISE_INVALID_OPCODE_RET();
3405 IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3406 iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3407}
3408
3409
3410/** Opcode 0x0f 0x26. */
3411FNIEMOP_DEF(iemOp_mov_Td_Rd)
3412{
3413 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3414 IEMOP_HLP_MIN_386();
3415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3417 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3418 IEMOP_RAISE_INVALID_OPCODE_RET();
3419 IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3420}
3421
3422
3423/**
3424 * @opcode 0x28
3425 * @oppfx none
3426 * @opcpuid sse
3427 * @opgroup og_sse_simdfp_datamove
3428 * @opxcpttype 1
3429 * @optest op1=1 op2=2 -> op1=2
3430 * @optest op1=0 op2=-42 -> op1=-42
3431 */
3432FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3433{
3434 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3436 if (IEM_IS_MODRM_REG_MODE(bRm))
3437 {
3438 /*
3439 * Register, register.
3440 */
3441 IEM_MC_BEGIN(0, 0, 0, 0);
3442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3443 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3444 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3445 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3446 IEM_GET_MODRM_RM(pVCpu, bRm));
3447 IEM_MC_ADVANCE_RIP_AND_FINISH();
3448 IEM_MC_END();
3449 }
3450 else
3451 {
3452 /*
3453 * Register, memory.
3454 */
3455 IEM_MC_BEGIN(0, 2, 0, 0);
3456 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3458
3459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3461 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3462 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3463
3464 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3465 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3466
3467 IEM_MC_ADVANCE_RIP_AND_FINISH();
3468 IEM_MC_END();
3469 }
3470}
3471
3472/**
3473 * @opcode 0x28
3474 * @oppfx 66
3475 * @opcpuid sse2
3476 * @opgroup og_sse2_pcksclr_datamove
3477 * @opxcpttype 1
3478 * @optest op1=1 op2=2 -> op1=2
3479 * @optest op1=0 op2=-42 -> op1=-42
3480 */
3481FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3482{
3483 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3484 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3485 if (IEM_IS_MODRM_REG_MODE(bRm))
3486 {
3487 /*
3488 * Register, register.
3489 */
3490 IEM_MC_BEGIN(0, 0, 0, 0);
3491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3492 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3493 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3494 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3495 IEM_GET_MODRM_RM(pVCpu, bRm));
3496 IEM_MC_ADVANCE_RIP_AND_FINISH();
3497 IEM_MC_END();
3498 }
3499 else
3500 {
3501 /*
3502 * Register, memory.
3503 */
3504 IEM_MC_BEGIN(0, 2, 0, 0);
3505 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3507
3508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3510 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3511 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3512
3513 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3514 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3515
3516 IEM_MC_ADVANCE_RIP_AND_FINISH();
3517 IEM_MC_END();
3518 }
3519}
3520
3521/* Opcode 0xf3 0x0f 0x28 - invalid */
3522/* Opcode 0xf2 0x0f 0x28 - invalid */
3523
3524/**
3525 * @opcode 0x29
3526 * @oppfx none
3527 * @opcpuid sse
3528 * @opgroup og_sse_simdfp_datamove
3529 * @opxcpttype 1
3530 * @optest op1=1 op2=2 -> op1=2
3531 * @optest op1=0 op2=-42 -> op1=-42
3532 */
3533FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3534{
3535 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3536 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3537 if (IEM_IS_MODRM_REG_MODE(bRm))
3538 {
3539 /*
3540 * Register, register.
3541 */
3542 IEM_MC_BEGIN(0, 0, 0, 0);
3543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3544 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3545 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3546 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3547 IEM_GET_MODRM_REG(pVCpu, bRm));
3548 IEM_MC_ADVANCE_RIP_AND_FINISH();
3549 IEM_MC_END();
3550 }
3551 else
3552 {
3553 /*
3554 * Memory, register.
3555 */
3556 IEM_MC_BEGIN(0, 2, 0, 0);
3557 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3559
3560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3562 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3563 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3564
3565 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3566 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3567
3568 IEM_MC_ADVANCE_RIP_AND_FINISH();
3569 IEM_MC_END();
3570 }
3571}
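
/* Note: this is the MR form of movaps (opcode 0x29), so the operand roles
   are reversed relative to the 0x28 (RM) form above: MODRM.REG names the
   source and MODRM.RM the destination, which is why IEM_MC_COPY_XREG_U128
   here takes the RM index as the destination. */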
3572
3573/**
3574 * @opcode 0x29
3575 * @oppfx 66
3576 * @opcpuid sse2
3577 * @opgroup og_sse2_pcksclr_datamove
3578 * @opxcpttype 1
3579 * @optest op1=1 op2=2 -> op1=2
3580 * @optest op1=0 op2=-42 -> op1=-42
3581 */
3582FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3583{
3584 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3585 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3586 if (IEM_IS_MODRM_REG_MODE(bRm))
3587 {
3588 /*
3589 * Register, register.
3590 */
3591 IEM_MC_BEGIN(0, 0, 0, 0);
3592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3593 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3594 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3595 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3596 IEM_GET_MODRM_REG(pVCpu, bRm));
3597 IEM_MC_ADVANCE_RIP_AND_FINISH();
3598 IEM_MC_END();
3599 }
3600 else
3601 {
3602 /*
3603 * Memory, register.
3604 */
3605 IEM_MC_BEGIN(0, 2, 0, 0);
3606 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3608
3609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3611 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3612 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3613
3614 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3615 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3616
3617 IEM_MC_ADVANCE_RIP_AND_FINISH();
3618 IEM_MC_END();
3619 }
3620}
3621
3622/* Opcode 0xf3 0x0f 0x29 - invalid */
3623/* Opcode 0xf2 0x0f 0x29 - invalid */
3624
3625
3626/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3627FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3628{
3629 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3630 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3631 if (IEM_IS_MODRM_REG_MODE(bRm))
3632 {
3633 /*
3634 * XMM, MMX
3635 */
3636 IEM_MC_BEGIN(3, 1, 0, 0);
3637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3638 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3639 IEM_MC_LOCAL(X86XMMREG, Dst);
3640 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3641 IEM_MC_ARG(uint64_t, u64Src, 2);
3642 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3643 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3644 IEM_MC_PREPARE_FPU_USAGE();
3645 IEM_MC_FPU_TO_MMX_MODE();
3646
3647 IEM_MC_REF_MXCSR(pfMxcsr);
3648 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3649 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3650
3651 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3652 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3653 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3654 } IEM_MC_ELSE() {
3655 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3656 } IEM_MC_ENDIF();
3657
3658 IEM_MC_ADVANCE_RIP_AND_FINISH();
3659 IEM_MC_END();
3660 }
3661 else
3662 {
3663 /*
3664 * XMM, [mem64]
3665 */
3666 IEM_MC_BEGIN(3, 2, 0, 0);
3667 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3668 IEM_MC_LOCAL(X86XMMREG, Dst);
3669 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3670 IEM_MC_ARG(uint64_t, u64Src, 2);
3671 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3672
3673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3675 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3676 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3677 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3678
3679 IEM_MC_PREPARE_FPU_USAGE();
3680 IEM_MC_FPU_TO_MMX_MODE();
3681 IEM_MC_REF_MXCSR(pfMxcsr);
3682
3683 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3684 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3685 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3686 } IEM_MC_ELSE() {
3687 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3688 } IEM_MC_ENDIF();
3689
3690 IEM_MC_ADVANCE_RIP_AND_FINISH();
3691 IEM_MC_END();
3692 }
3693}
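
/* Note: cvtpi2ps writes only the low quadword of the destination (two
   int32 -> two single precision values) and preserves the high quadword,
   which is why the destination register is fetched into Dst before the
   worker is called.  Example (assembly):
       cvtpi2ps xmm0, mm1
   converts the two packed int32 values in mm1 into xmm0[31:0] and
   xmm0[63:32], leaving xmm0[127:64] untouched. */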
3694
3695
3696/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3697FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3698{
3699 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3701 if (IEM_IS_MODRM_REG_MODE(bRm))
3702 {
3703 /*
3704 * XMM, MMX
3705 */
3706 IEM_MC_BEGIN(3, 1, 0, 0);
3707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3708 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3709 IEM_MC_LOCAL(X86XMMREG, Dst);
3710 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3711 IEM_MC_ARG(uint64_t, u64Src, 2);
3712 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3713 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3714 IEM_MC_PREPARE_FPU_USAGE();
3715 IEM_MC_FPU_TO_MMX_MODE();
3716
3717 IEM_MC_REF_MXCSR(pfMxcsr);
3718 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3719
3720 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3721 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3722 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3723 } IEM_MC_ELSE() {
3724 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3725 } IEM_MC_ENDIF();
3726
3727 IEM_MC_ADVANCE_RIP_AND_FINISH();
3728 IEM_MC_END();
3729 }
3730 else
3731 {
3732 /*
3733 * XMM, [mem64]
3734 */
3735 IEM_MC_BEGIN(3, 2, 0, 0);
3736 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3737 IEM_MC_LOCAL(X86XMMREG, Dst);
3738 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3739 IEM_MC_ARG(uint64_t, u64Src, 2);
3740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3741
3742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3744 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3745 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3746 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3747
3748 /* Doesn't cause a transition to MMX mode. */
3749 IEM_MC_PREPARE_SSE_USAGE();
3750 IEM_MC_REF_MXCSR(pfMxcsr);
3751
3752 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3753 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3754 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3755 } IEM_MC_ELSE() {
3756 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3757 } IEM_MC_ENDIF();
3758
3759 IEM_MC_ADVANCE_RIP_AND_FINISH();
3760 IEM_MC_END();
3761 }
3762}
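
/* Note: per the SDM, cvtpi2pd is special in that its memory form does not
   transition the FPU to MMX mode (cvtpi2ps above transitions in both
   encodings), which is why IEM_MC_FPU_TO_MMX_MODE only appears in the
   register path here. */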
3763
3764
3765/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3766FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3767{
3768 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3769
3770 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3771 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3772 {
3773 if (IEM_IS_MODRM_REG_MODE(bRm))
3774 {
3775 /* XMM, greg64 */
3776 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3777 IEM_MC_LOCAL(uint32_t, fMxcsr);
3778 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3779 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3780 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3781 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3782
3783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3784 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3785 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3786
3787 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3788 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3789 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3790 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3791 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3792 } IEM_MC_ELSE() {
3793 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3794 } IEM_MC_ENDIF();
3795
3796 IEM_MC_ADVANCE_RIP_AND_FINISH();
3797 IEM_MC_END();
3798 }
3799 else
3800 {
3801 /* XMM, [mem64] */
3802 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
3803 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3804 IEM_MC_LOCAL(uint32_t, fMxcsr);
3805 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3806 IEM_MC_LOCAL(int64_t, i64Src);
3807 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3808 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3809 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3810
3811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3813 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3814 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3815
3816 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3817 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3818 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3819 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3820 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3821 } IEM_MC_ELSE() {
3822 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3823 } IEM_MC_ENDIF();
3824
3825 IEM_MC_ADVANCE_RIP_AND_FINISH();
3826 IEM_MC_END();
3827 }
3828 }
3829 else
3830 {
3831 if (IEM_IS_MODRM_REG_MODE(bRm))
3832 {
3833 /* XMM, greg32 */
3834 IEM_MC_BEGIN(3, 2, 0, 0);
3835 IEM_MC_LOCAL(uint32_t, fMxcsr);
3836 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3837 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3838 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3839 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3840
3841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3842 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3843 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3844
3845 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3846 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3847 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3848 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3849 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3850 } IEM_MC_ELSE() {
3851 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3852 } IEM_MC_ENDIF();
3853
3854 IEM_MC_ADVANCE_RIP_AND_FINISH();
3855 IEM_MC_END();
3856 }
3857 else
3858 {
3859 /* XMM, [mem32] */
3860 IEM_MC_BEGIN(3, 4, 0, 0);
3861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3862 IEM_MC_LOCAL(uint32_t, fMxcsr);
3863 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3864 IEM_MC_LOCAL(int32_t, i32Src);
3865 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3866 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3867 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3868
3869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3871 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3872 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3873
3874 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3875 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3876 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3877 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3878 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3879 } IEM_MC_ELSE() {
3880 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3881 } IEM_MC_ENDIF();
3882
3883 IEM_MC_ADVANCE_RIP_AND_FINISH();
3884 IEM_MC_END();
3885 }
3886 }
3887}
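
/* Note: REX.W selects the integer source width for cvtsi2ss, hence the
   fork on IEM_OP_PRF_SIZE_REX_W above.  Example (assembly):
       cvtsi2ss xmm0, eax    ; int32 -> float32
       cvtsi2ss xmm0, rax    ; int64 -> float32, REX.W encoding
   Only the low 32 bits of the destination are written; bits 127:32 are
   preserved.  The cvtsi2sd form below follows the same pattern with a
   double precision result. */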
3888
3889
3890/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3891FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3892{
3893 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3894
3895 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3896 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3897 {
3898 if (IEM_IS_MODRM_REG_MODE(bRm))
3899 {
3900 /* XMM, greg64 */
3901 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3902 IEM_MC_LOCAL(uint32_t, fMxcsr);
3903 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3904 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3905 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3906 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3907
3908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3909 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3910 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3911
3912 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3913 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3914 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3915 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3916 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3917 } IEM_MC_ELSE() {
3918 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3919 } IEM_MC_ENDIF();
3920
3921 IEM_MC_ADVANCE_RIP_AND_FINISH();
3922 IEM_MC_END();
3923 }
3924 else
3925 {
3926 /* XMM, [mem64] */
3927 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
3928 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3929 IEM_MC_LOCAL(uint32_t, fMxcsr);
3930 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3931 IEM_MC_LOCAL(int64_t, i64Src);
3932 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3933 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3934 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3935
3936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3938 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3939 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3940
3941 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3942 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3943 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3944 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3945 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3946 } IEM_MC_ELSE() {
3947 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3948 } IEM_MC_ENDIF();
3949
3950 IEM_MC_ADVANCE_RIP_AND_FINISH();
3951 IEM_MC_END();
3952 }
3953 }
3954 else
3955 {
3956 if (IEM_IS_MODRM_REG_MODE(bRm))
3957 {
3958 /* XMM, greg32 */
3959 IEM_MC_BEGIN(3, 2, 0, 0);
3960 IEM_MC_LOCAL(uint32_t, fMxcsr);
3961 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3962 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3963 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3964 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3965
3966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3967 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3968 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3969
3970 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3971 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3972 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3973 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3974 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3975 } IEM_MC_ELSE() {
3976 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3977 } IEM_MC_ENDIF();
3978
3979 IEM_MC_ADVANCE_RIP_AND_FINISH();
3980 IEM_MC_END();
3981 }
3982 else
3983 {
3984 /* XMM, [mem32] */
3985 IEM_MC_BEGIN(3, 4, 0, 0);
3986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3987 IEM_MC_LOCAL(uint32_t, fMxcsr);
3988 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3989 IEM_MC_LOCAL(int32_t, i32Src);
3990 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3991 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3992 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3993
3994 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3996 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3997 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3998
3999 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4000 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
4001 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4002 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4003 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4004 } IEM_MC_ELSE() {
4005 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
4006 } IEM_MC_ENDIF();
4007
4008 IEM_MC_ADVANCE_RIP_AND_FINISH();
4009 IEM_MC_END();
4010 }
4011 }
4012}
4013
4014
4015/**
4016 * @opcode 0x2b
4017 * @opcodesub !11 mr/reg
4018 * @oppfx none
4019 * @opcpuid sse
4020 * @opgroup og_sse1_cachect
4021 * @opxcpttype 1
4022 * @optest op1=1 op2=2 -> op1=2
4023 * @optest op1=0 op2=-42 -> op1=-42
4024 */
4025FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4026{
4027 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4028 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4029 if (IEM_IS_MODRM_MEM_MODE(bRm))
4030 {
4031 /*
4032 * Memory, register.
4033 */
4034 IEM_MC_BEGIN(0, 2, 0, 0);
4035 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4037
4038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4040 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4041 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4042
4043 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4044 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4045
4046 IEM_MC_ADVANCE_RIP_AND_FINISH();
4047 IEM_MC_END();
4048 }
4049 /* The register, register encoding is invalid. */
4050 else
4051 IEMOP_RAISE_INVALID_OPCODE_RET();
4052}
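
/* Note: movntps is a streaming (non-temporal) store.  The hint only
   affects caching, never architectural state, so the emulation can use a
   plain aligned store, e.g.:
       movntps [rdi], xmm0
   The register encoding (mod=3) is undefined, hence the #UD path above. */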
4053
4054/**
4055 * @opcode 0x2b
4056 * @opcodesub !11 mr/reg
4057 * @oppfx 0x66
4058 * @opcpuid sse2
4059 * @opgroup og_sse2_cachect
4060 * @opxcpttype 1
4061 * @optest op1=1 op2=2 -> op1=2
4062 * @optest op1=0 op2=-42 -> op1=-42
4063 */
4064FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4065{
4066 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4067 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4068 if (IEM_IS_MODRM_MEM_MODE(bRm))
4069 {
4070 /*
4071 * Memory, register.
4072 */
4073 IEM_MC_BEGIN(0, 2, 0, 0);
4074 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4076
4077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4079 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4080 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4081
4082 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4083 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4084
4085 IEM_MC_ADVANCE_RIP_AND_FINISH();
4086 IEM_MC_END();
4087 }
4088 /* The register, register encoding is invalid. */
4089 else
4090 IEMOP_RAISE_INVALID_OPCODE_RET();
4091}
4092/* Opcode 0xf3 0x0f 0x2b - invalid */
4093/* Opcode 0xf2 0x0f 0x2b - invalid */
4094
4095
4096/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4097FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4098{
4099 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4100 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4101 if (IEM_IS_MODRM_REG_MODE(bRm))
4102 {
4103 /*
4104 * Register, register.
4105 */
4106 IEM_MC_BEGIN(3, 1, 0, 0);
4107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4108 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4109 IEM_MC_LOCAL(uint64_t, u64Dst);
4110 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4111 IEM_MC_ARG(uint64_t, u64Src, 2);
4112 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4113 IEM_MC_PREPARE_FPU_USAGE();
4114 IEM_MC_FPU_TO_MMX_MODE();
4115
4116 IEM_MC_REF_MXCSR(pfMxcsr);
4117 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4118
4119 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4120 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4121 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4122 } IEM_MC_ELSE() {
4123 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4124 } IEM_MC_ENDIF();
4125
4126 IEM_MC_ADVANCE_RIP_AND_FINISH();
4127 IEM_MC_END();
4128 }
4129 else
4130 {
4131 /*
4132 * Register, memory.
4133 */
4134 IEM_MC_BEGIN(3, 2, 0, 0);
4135 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4136 IEM_MC_LOCAL(uint64_t, u64Dst);
4137 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4138 IEM_MC_ARG(uint64_t, u64Src, 2);
4139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4140
4141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4143 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4144 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4145
4146 IEM_MC_PREPARE_FPU_USAGE();
4147 IEM_MC_FPU_TO_MMX_MODE();
4148 IEM_MC_REF_MXCSR(pfMxcsr);
4149
4150 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4151 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4152 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4153 } IEM_MC_ELSE() {
4154 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4155 } IEM_MC_ENDIF();
4156
4157 IEM_MC_ADVANCE_RIP_AND_FINISH();
4158 IEM_MC_END();
4159 }
4160}
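
/* Note: the 0x2c 'cvtt*' conversions truncate, i.e. always round towards
   zero regardless of MXCSR.RC, whereas the 0x2d 'cvt*' variants further
   down honour the current rounding mode.  Both deposit packed int32
   results in an MMX register, hence the FPU-to-MMX mode transition. */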
4161
4162
4163/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4164FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4165{
4166 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4167 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4168 if (IEM_IS_MODRM_REG_MODE(bRm))
4169 {
4170 /*
4171 * Register, register.
4172 */
4173 IEM_MC_BEGIN(3, 1, 0, 0);
4174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4175 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4176 IEM_MC_LOCAL(uint64_t, u64Dst);
4177 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4178 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4179 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4180 IEM_MC_PREPARE_FPU_USAGE();
4181 IEM_MC_FPU_TO_MMX_MODE();
4182
4183 IEM_MC_REF_MXCSR(pfMxcsr);
4184 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4185
4186 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4187 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4188 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4189 } IEM_MC_ELSE() {
4190 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4191 } IEM_MC_ENDIF();
4192
4193 IEM_MC_ADVANCE_RIP_AND_FINISH();
4194 IEM_MC_END();
4195 }
4196 else
4197 {
4198 /*
4199 * Register, memory.
4200 */
4201 IEM_MC_BEGIN(3, 3, 0, 0);
4202 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4203 IEM_MC_LOCAL(uint64_t, u64Dst);
4204 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4205 IEM_MC_LOCAL(X86XMMREG, uSrc);
4206 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4208
4209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4211 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4212 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4213
4214 IEM_MC_PREPARE_FPU_USAGE();
4215 IEM_MC_FPU_TO_MMX_MODE();
4216
4217 IEM_MC_REF_MXCSR(pfMxcsr);
4218
4219 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4220 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4221 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4222 } IEM_MC_ELSE() {
4223 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4224 } IEM_MC_ENDIF();
4225
4226 IEM_MC_ADVANCE_RIP_AND_FINISH();
4227 IEM_MC_END();
4228 }
4229}
4230
4231
4232/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4233FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4234{
4235 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4236
4237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4238 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4239 {
4240 if (IEM_IS_MODRM_REG_MODE(bRm))
4241 {
4242 /* greg64, XMM */
4243 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4244 IEM_MC_LOCAL(uint32_t, fMxcsr);
4245 IEM_MC_LOCAL(int64_t, i64Dst);
4246 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4247 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4248 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4249
4250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4251 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4252 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4253
4254 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4255 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4256 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4257 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4258 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4259 } IEM_MC_ELSE() {
4260 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4261 } IEM_MC_ENDIF();
4262
4263 IEM_MC_ADVANCE_RIP_AND_FINISH();
4264 IEM_MC_END();
4265 }
4266 else
4267 {
4268 /* greg64, [mem64] */
4269 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4271 IEM_MC_LOCAL(uint32_t, fMxcsr);
4272 IEM_MC_LOCAL(int64_t, i64Dst);
4273 IEM_MC_LOCAL(uint32_t, u32Src);
4274 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4275 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4276 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4277
4278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4280 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4281 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4282
4283 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4284 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4285 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4286 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4287 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4288 } IEM_MC_ELSE() {
4289 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4290 } IEM_MC_ENDIF();
4291
4292 IEM_MC_ADVANCE_RIP_AND_FINISH();
4293 IEM_MC_END();
4294 }
4295 }
4296 else
4297 {
4298 if (IEM_IS_MODRM_REG_MODE(bRm))
4299 {
4300 /* greg32, XMM */
4301 IEM_MC_BEGIN(3, 2, 0, 0);
4302 IEM_MC_LOCAL(uint32_t, fMxcsr);
4303 IEM_MC_LOCAL(int32_t, i32Dst);
4304 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4305 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4306 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4307
4308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4309 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4310 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4311
4312 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4313 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4314 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4315 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4316 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4317 } IEM_MC_ELSE() {
4318 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4319 } IEM_MC_ENDIF();
4320
4321 IEM_MC_ADVANCE_RIP_AND_FINISH();
4322 IEM_MC_END();
4323 }
4324 else
4325 {
4326 /* greg32, [mem32] */
4327 IEM_MC_BEGIN(3, 4, 0, 0);
4328 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4329 IEM_MC_LOCAL(uint32_t, fMxcsr);
4330 IEM_MC_LOCAL(int32_t, i32Dst);
4331 IEM_MC_LOCAL(uint32_t, u32Src);
4332 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4333 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4334 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4335
4336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4338 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4339 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4340
4341 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4342 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4343 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4344 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4345 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4346 } IEM_MC_ELSE() {
4347 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4348 } IEM_MC_ENDIF();
4349
4350 IEM_MC_ADVANCE_RIP_AND_FINISH();
4351 IEM_MC_END();
4352 }
4353 }
4354}
4355
4356
4357/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4358FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4359{
4360 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4361
4362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4363 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4364 {
4365 if (IEM_IS_MODRM_REG_MODE(bRm))
4366 {
4367 /* greg64, XMM */
4368 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4369 IEM_MC_LOCAL(uint32_t, fMxcsr);
4370 IEM_MC_LOCAL(int64_t, i64Dst);
4371 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4372 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4373 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4374
4375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4376 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4377 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4378
4379 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4380 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4381 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4382 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4383 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4384 } IEM_MC_ELSE() {
4385 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4386 } IEM_MC_ENDIF();
4387
4388 IEM_MC_ADVANCE_RIP_AND_FINISH();
4389 IEM_MC_END();
4390 }
4391 else
4392 {
4393 /* greg64, [mem64] */
4394 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4396 IEM_MC_LOCAL(uint32_t, fMxcsr);
4397 IEM_MC_LOCAL(int64_t, i64Dst);
4398 IEM_MC_LOCAL(uint64_t, u64Src);
4399 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4400 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4401 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4402
4403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4405 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4406 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4407
4408 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4409 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4410 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4411 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4412 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4413 } IEM_MC_ELSE() {
4414 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4415 } IEM_MC_ENDIF();
4416
4417 IEM_MC_ADVANCE_RIP_AND_FINISH();
4418 IEM_MC_END();
4419 }
4420 }
4421 else
4422 {
4423 if (IEM_IS_MODRM_REG_MODE(bRm))
4424 {
4425 /* greg, XMM */
4426 IEM_MC_BEGIN(3, 2, 0, 0);
4427 IEM_MC_LOCAL(uint32_t, fMxcsr);
4428 IEM_MC_LOCAL(int32_t, i32Dst);
4429 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4430 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4431 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4432
4433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4434 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4435 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4436
4437 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4438 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4439 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4440 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4441 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4442 } IEM_MC_ELSE() {
4443 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4444 } IEM_MC_ENDIF();
4445
4446 IEM_MC_ADVANCE_RIP_AND_FINISH();
4447 IEM_MC_END();
4448 }
4449 else
4450 {
4451 /* greg32, [mem32] */
4452 IEM_MC_BEGIN(3, 4, 0, 0);
4453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4454 IEM_MC_LOCAL(uint32_t, fMxcsr);
4455 IEM_MC_LOCAL(int32_t, i32Dst);
4456 IEM_MC_LOCAL(uint64_t, u64Src);
4457 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4458 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4459 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4460
4461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4463 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4464 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4465
4466 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4467 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4468 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4469 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4470 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4471 } IEM_MC_ELSE() {
4472 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4473 } IEM_MC_ENDIF();
4474
4475 IEM_MC_ADVANCE_RIP_AND_FINISH();
4476 IEM_MC_END();
4477 }
4478 }
4479}
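
/* Note: all these scalar conversions only commit their result when no
   unmasked MXCSR exception is pending (the IEM_MC_IF_MXCSR_XCPT_PENDING
   blocks), mirroring hardware where an unmasked #I or #P faults before
   the destination register is updated. */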
4480
4481
4482/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4483FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4484{
4485 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4486 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4487 if (IEM_IS_MODRM_REG_MODE(bRm))
4488 {
4489 /*
4490 * Register, register.
4491 */
4492 IEM_MC_BEGIN(3, 1, 0, 0);
4493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4494 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4495 IEM_MC_LOCAL(uint64_t, u64Dst);
4496 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4497 IEM_MC_ARG(uint64_t, u64Src, 2);
4498
4499 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4500 IEM_MC_PREPARE_FPU_USAGE();
4501 IEM_MC_FPU_TO_MMX_MODE();
4502
4503 IEM_MC_REF_MXCSR(pfMxcsr);
4504 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4505
4506 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4507 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4508 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4509 } IEM_MC_ELSE() {
4510 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4511 } IEM_MC_ENDIF();
4512
4513 IEM_MC_ADVANCE_RIP_AND_FINISH();
4514 IEM_MC_END();
4515 }
4516 else
4517 {
4518 /*
4519 * Register, memory.
4520 */
4521 IEM_MC_BEGIN(3, 2, 0, 0);
4522 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4523 IEM_MC_LOCAL(uint64_t, u64Dst);
4524 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4525 IEM_MC_ARG(uint64_t, u64Src, 2);
4526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4527
4528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4530 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4531 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4532
4533 IEM_MC_PREPARE_FPU_USAGE();
4534 IEM_MC_FPU_TO_MMX_MODE();
4535 IEM_MC_REF_MXCSR(pfMxcsr);
4536
4537 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4538 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4539 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4540 } IEM_MC_ELSE() {
4541 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4542 } IEM_MC_ENDIF();
4543
4544 IEM_MC_ADVANCE_RIP_AND_FINISH();
4545 IEM_MC_END();
4546 }
4547}
4548
4549
4550/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4551FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4552{
4553 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4554 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4555 if (IEM_IS_MODRM_REG_MODE(bRm))
4556 {
4557 /*
4558 * Register, register.
4559 */
4560 IEM_MC_BEGIN(3, 1, 0, 0);
4561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4562 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4563 IEM_MC_LOCAL(uint64_t, u64Dst);
4564 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4565 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4566
4567 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4568 IEM_MC_PREPARE_FPU_USAGE();
4569 IEM_MC_FPU_TO_MMX_MODE();
4570
4571 IEM_MC_REF_MXCSR(pfMxcsr);
4572 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4573
4574 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4575 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4576 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4577 } IEM_MC_ELSE() {
4578 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4579 } IEM_MC_ENDIF();
4580
4581 IEM_MC_ADVANCE_RIP_AND_FINISH();
4582 IEM_MC_END();
4583 }
4584 else
4585 {
4586 /*
4587 * Register, memory.
4588 */
4589 IEM_MC_BEGIN(3, 3, 0, 0);
4590 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4591 IEM_MC_LOCAL(uint64_t, u64Dst);
4592 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4593 IEM_MC_LOCAL(X86XMMREG, uSrc);
4594 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4595 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4596
4597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4599 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4600 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4601
4602 IEM_MC_PREPARE_FPU_USAGE();
4603 IEM_MC_FPU_TO_MMX_MODE();
4604
4605 IEM_MC_REF_MXCSR(pfMxcsr);
4606
4607 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4608 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4609 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4610 } IEM_MC_ELSE() {
4611 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4612 } IEM_MC_ENDIF();
4613
4614 IEM_MC_ADVANCE_RIP_AND_FINISH();
4615 IEM_MC_END();
4616 }
4617}
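
/* Note: unlike the ps -> pi conversions, which only need the low quadword
   of the source, cvt(t)pd2pi consumes a full 128-bit operand (two
   doubles), so the workers take a PCX86XMMREG and the memory form uses
   the aligned 128-bit fetch above. */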
4618
4619
4620/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4621FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4622{
4623 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4624
4625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4626 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4627 {
4628 if (IEM_IS_MODRM_REG_MODE(bRm))
4629 {
4630 /* greg64, XMM */
4631 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4632 IEM_MC_LOCAL(uint32_t, fMxcsr);
4633 IEM_MC_LOCAL(int64_t, i64Dst);
4634 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4635 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4636 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4637
4638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4639 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4640 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4641
4642 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4643 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4644 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4645 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4646 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4647 } IEM_MC_ELSE() {
4648 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4649 } IEM_MC_ENDIF();
4650
4651 IEM_MC_ADVANCE_RIP_AND_FINISH();
4652 IEM_MC_END();
4653 }
4654 else
4655 {
4656 /* greg64, [mem64] */
4657 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4659 IEM_MC_LOCAL(uint32_t, fMxcsr);
4660 IEM_MC_LOCAL(int64_t, i64Dst);
4661 IEM_MC_LOCAL(uint32_t, u32Src);
4662 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4663 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4664 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4665
4666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4668 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4669 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4670
4671 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4672 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4673 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4674 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4675 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4676 } IEM_MC_ELSE() {
4677 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4678 } IEM_MC_ENDIF();
4679
4680 IEM_MC_ADVANCE_RIP_AND_FINISH();
4681 IEM_MC_END();
4682 }
4683 }
4684 else
4685 {
4686 if (IEM_IS_MODRM_REG_MODE(bRm))
4687 {
4688 /* greg32, XMM */
4689 IEM_MC_BEGIN(3, 2, 0, 0);
4690 IEM_MC_LOCAL(uint32_t, fMxcsr);
4691 IEM_MC_LOCAL(int32_t, i32Dst);
4692 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4693 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4694 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4695
4696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4697 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4698 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4699
4700 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4701 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4702 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4703 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4704 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4705 } IEM_MC_ELSE() {
4706 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4707 } IEM_MC_ENDIF();
4708
4709 IEM_MC_ADVANCE_RIP_AND_FINISH();
4710 IEM_MC_END();
4711 }
4712 else
4713 {
4714 /* greg32, [mem32] */
4715 IEM_MC_BEGIN(3, 4, 0, 0);
4716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4717 IEM_MC_LOCAL(uint32_t, fMxcsr);
4718 IEM_MC_LOCAL(int32_t, i32Dst);
4719 IEM_MC_LOCAL(uint32_t, u32Src);
4720 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4721 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4722 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4723
4724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4726 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4727 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4728
4729 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4730 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4731 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4732 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4733 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4734 } IEM_MC_ELSE() {
4735 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4736 } IEM_MC_ENDIF();
4737
4738 IEM_MC_ADVANCE_RIP_AND_FINISH();
4739 IEM_MC_END();
4740 }
4741 }
4742}
4743
4744
4745/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4746FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4747{
4748 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4749
4750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4751 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4752 {
4753 if (IEM_IS_MODRM_REG_MODE(bRm))
4754 {
4755 /* greg64, XMM */
4756 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4757 IEM_MC_LOCAL(uint32_t, fMxcsr);
4758 IEM_MC_LOCAL(int64_t, i64Dst);
4759 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4760 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4761 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4762
4763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4764 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4765 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4766
4767 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4768 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4769 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4770 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4771 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4772 } IEM_MC_ELSE() {
4773 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4774 } IEM_MC_ENDIF();
4775
4776 IEM_MC_ADVANCE_RIP_AND_FINISH();
4777 IEM_MC_END();
4778 }
4779 else
4780 {
4781 /* greg64, [mem64] */
4782 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4784 IEM_MC_LOCAL(uint32_t, fMxcsr);
4785 IEM_MC_LOCAL(int64_t, i64Dst);
4786 IEM_MC_LOCAL(uint64_t, u64Src);
4787 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4788 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4789 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4790
4791 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4793 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4794 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4795
4796 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4797 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4798 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4799 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4800 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4801 } IEM_MC_ELSE() {
4802 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4803 } IEM_MC_ENDIF();
4804
4805 IEM_MC_ADVANCE_RIP_AND_FINISH();
4806 IEM_MC_END();
4807 }
4808 }
4809 else
4810 {
4811 if (IEM_IS_MODRM_REG_MODE(bRm))
4812 {
4813 /* greg32, XMM */
4814 IEM_MC_BEGIN(3, 2, 0, 0);
4815 IEM_MC_LOCAL(uint32_t, fMxcsr);
4816 IEM_MC_LOCAL(int32_t, i32Dst);
4817 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4818 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4819 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4820
4821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4822 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4823 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4824
4825 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4826 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4827 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4828 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4829 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4830 } IEM_MC_ELSE() {
4831 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4832 } IEM_MC_ENDIF();
4833
4834 IEM_MC_ADVANCE_RIP_AND_FINISH();
4835 IEM_MC_END();
4836 }
4837 else
4838 {
4839 /* greg32, [mem64] */
4840 IEM_MC_BEGIN(3, 4, 0, 0);
4841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4842 IEM_MC_LOCAL(uint32_t, fMxcsr);
4843 IEM_MC_LOCAL(int32_t, i32Dst);
4844 IEM_MC_LOCAL(uint64_t, u64Src);
4845 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4846 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4847 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4848
4849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4851 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4852 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4853
4854 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4855 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4856 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4857 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4858 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4859 } IEM_MC_ELSE() {
4860 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4861 } IEM_MC_ENDIF();
4862
4863 IEM_MC_ADVANCE_RIP_AND_FINISH();
4864 IEM_MC_END();
4865 }
4866 }
4867}
4868
4869
4870/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4871FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4872{
4873 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4874 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4875 if (IEM_IS_MODRM_REG_MODE(bRm))
4876 {
4877 /*
4878 * Register, register.
4879 */
4880 IEM_MC_BEGIN(4, 1, 0, 0);
4881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4882 IEM_MC_LOCAL(uint32_t, fEFlags);
4883 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4884 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4885 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4886 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4887 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4888 IEM_MC_PREPARE_SSE_USAGE();
4889 IEM_MC_FETCH_EFLAGS(fEFlags);
4890 IEM_MC_REF_MXCSR(pfMxcsr);
4891 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4892 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4893 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4894 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4895 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4896 } IEM_MC_ELSE() {
4897 IEM_MC_COMMIT_EFLAGS(fEFlags);
4898 } IEM_MC_ENDIF();
4899
4900 IEM_MC_ADVANCE_RIP_AND_FINISH();
4901 IEM_MC_END();
4902 }
4903 else
4904 {
4905 /*
4906 * Register, memory.
4907 */
4908 IEM_MC_BEGIN(4, 3, 0, 0);
4909 IEM_MC_LOCAL(uint32_t, fEFlags);
4910 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4911 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4912 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4913 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4914 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4915 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4916
4917 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4919 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4920 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4921
4922 IEM_MC_PREPARE_SSE_USAGE();
4923 IEM_MC_FETCH_EFLAGS(fEFlags);
4924 IEM_MC_REF_MXCSR(pfMxcsr);
4925 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4926 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4927 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4928 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4929 } IEM_MC_ELSE() {
4930 IEM_MC_COMMIT_EFLAGS(fEFlags);
4931 } IEM_MC_ENDIF();
4932
4933 IEM_MC_ADVANCE_RIP_AND_FINISH();
4934 IEM_MC_END();
4935 }
4936}
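
/* Note: ucomiss sets ZF, PF and CF from the compare (an unordered result
   sets all three) and clears OF, SF and AF, which is why EFLAGS is
   fetched and committed around the worker call.  The unordered u* forms
   only raise #I for SNaN operands; comiss/comisd below also fault on
   QNaNs. */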
4937
4938
4939/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4940FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4941{
4942 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4943 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4944 if (IEM_IS_MODRM_REG_MODE(bRm))
4945 {
4946 /*
4947 * Register, register.
4948 */
4949 IEM_MC_BEGIN(4, 1, 0, 0);
4950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4951 IEM_MC_LOCAL(uint32_t, fEFlags);
4952 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4953 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4954 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4955 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4956 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4957 IEM_MC_PREPARE_SSE_USAGE();
4958 IEM_MC_FETCH_EFLAGS(fEFlags);
4959 IEM_MC_REF_MXCSR(pfMxcsr);
4960 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4961 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4962 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4963 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4964 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4965 } IEM_MC_ELSE() {
4966 IEM_MC_COMMIT_EFLAGS(fEFlags);
4967 } IEM_MC_ENDIF();
4968
4969 IEM_MC_ADVANCE_RIP_AND_FINISH();
4970 IEM_MC_END();
4971 }
4972 else
4973 {
4974 /*
4975 * Register, memory.
4976 */
4977 IEM_MC_BEGIN(4, 3, 0, 0);
4978 IEM_MC_LOCAL(uint32_t, fEFlags);
4979 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4980 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4981 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4982 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4983 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4984 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4985
4986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4988 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4989 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4990
4991 IEM_MC_PREPARE_SSE_USAGE();
4992 IEM_MC_FETCH_EFLAGS(fEFlags);
4993 IEM_MC_REF_MXCSR(pfMxcsr);
4994 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4995 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4996 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4997 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4998 } IEM_MC_ELSE() {
4999 IEM_MC_COMMIT_EFLAGS(fEFlags);
5000 } IEM_MC_ENDIF();
5001
5002 IEM_MC_ADVANCE_RIP_AND_FINISH();
5003 IEM_MC_END();
5004 }
5005}
5006
5007
5008/* Opcode 0xf3 0x0f 0x2e - invalid */
5009/* Opcode 0xf2 0x0f 0x2e - invalid */
5010
5011
5012/** Opcode 0x0f 0x2f - comiss Vss, Wss */
5013FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
5014{
5015 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5017 if (IEM_IS_MODRM_REG_MODE(bRm))
5018 {
5019 /*
5020 * Register, register.
5021 */
5022 IEM_MC_BEGIN(4, 1, 0, 0);
5023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5024 IEM_MC_LOCAL(uint32_t, fEFlags);
5025 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5026 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5027 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5028 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5029 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5030 IEM_MC_PREPARE_SSE_USAGE();
5031 IEM_MC_FETCH_EFLAGS(fEFlags);
5032 IEM_MC_REF_MXCSR(pfMxcsr);
5033 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5034 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5035 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5036 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5037 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5038 } IEM_MC_ELSE() {
5039 IEM_MC_COMMIT_EFLAGS(fEFlags);
5040 } IEM_MC_ENDIF();
5041
5042 IEM_MC_ADVANCE_RIP_AND_FINISH();
5043 IEM_MC_END();
5044 }
5045 else
5046 {
5047 /*
5048 * Register, memory.
5049 */
5050 IEM_MC_BEGIN(4, 3, 0, 0);
5051 IEM_MC_LOCAL(uint32_t, fEFlags);
5052 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5053 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5054 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5055 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5056 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5058
5059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5061 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5062 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5063
5064 IEM_MC_PREPARE_SSE_USAGE();
5065 IEM_MC_FETCH_EFLAGS(fEFlags);
5066 IEM_MC_REF_MXCSR(pfMxcsr);
5067 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5068 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5069 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5070 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5071 } IEM_MC_ELSE() {
5072 IEM_MC_COMMIT_EFLAGS(fEFlags);
5073 } IEM_MC_ENDIF();
5074
5075 IEM_MC_ADVANCE_RIP_AND_FINISH();
5076 IEM_MC_END();
5077 }
5078}
5079
5080
5081/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
5082FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5083{
5084 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5085 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5086 if (IEM_IS_MODRM_REG_MODE(bRm))
5087 {
5088 /*
5089 * Register, register.
5090 */
5091 IEM_MC_BEGIN(4, 1, 0, 0);
5092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5093 IEM_MC_LOCAL(uint32_t, fEFlags);
5094 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5095 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5096 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5097 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5098 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5099 IEM_MC_PREPARE_SSE_USAGE();
5100 IEM_MC_FETCH_EFLAGS(fEFlags);
5101 IEM_MC_REF_MXCSR(pfMxcsr);
5102 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5103 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5104 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5105 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5106 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5107 } IEM_MC_ELSE() {
5108 IEM_MC_COMMIT_EFLAGS(fEFlags);
5109 } IEM_MC_ENDIF();
5110
5111 IEM_MC_ADVANCE_RIP_AND_FINISH();
5112 IEM_MC_END();
5113 }
5114 else
5115 {
5116 /*
5117 * Register, memory.
5118 */
5119 IEM_MC_BEGIN(4, 3, 0, 0);
5120 IEM_MC_LOCAL(uint32_t, fEFlags);
5121 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5122 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5123 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5124 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5125 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5127
5128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5130 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5131 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5132
5133 IEM_MC_PREPARE_SSE_USAGE();
5134 IEM_MC_FETCH_EFLAGS(fEFlags);
5135 IEM_MC_REF_MXCSR(pfMxcsr);
5136 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5137 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5138 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5139 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5140 } IEM_MC_ELSE() {
5141 IEM_MC_COMMIT_EFLAGS(fEFlags);
5142 } IEM_MC_ENDIF();
5143
5144 IEM_MC_ADVANCE_RIP_AND_FINISH();
5145 IEM_MC_END();
5146 }
5147}
5148
5149
5150/* Opcode 0xf3 0x0f 0x2f - invalid */
5151/* Opcode 0xf2 0x0f 0x2f - invalid */
5152
5153/** Opcode 0x0f 0x30. */
5154FNIEMOP_DEF(iemOp_wrmsr)
5155{
5156 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
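 /* WRMSR only reads guest registers (ECX and EDX:EAX) and modifies none,
 hence the zero flush mask for the native recompiler. */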
5158 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
5159}
5160
5161
5162/** Opcode 0x0f 0x31. */
5163FNIEMOP_DEF(iemOp_rdtsc)
5164{
5165 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
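 /* RDTSC returns the timestamp in EDX:EAX, so the flush mask below covers
 the rAX and rDX guest register shadows. */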
5167 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
5168 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
5169 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
5170 iemCImpl_rdtsc);
5171}
5172
5173
5174/** Opcode 0x0f 0x32. */
5175FNIEMOP_DEF(iemOp_rdmsr)
5176{
5177 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5179 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
5180 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
5181 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
5182 iemCImpl_rdmsr);
5183}
5184
5185
5186/** Opcode 0x0f 0x33. */
5187FNIEMOP_DEF(iemOp_rdpmc)
5188{
5189 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5191 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
5192 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
5193 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
5194 iemCImpl_rdpmc);
5195}
5196
5197
5198/** Opcode 0x0f 0x34. */
5199FNIEMOP_DEF(iemOp_sysenter)
5200{
5201 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5203 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
5204 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
5205 iemCImpl_sysenter);
5206}
5207
5208/** Opcode 0x0f 0x35. */
5209FNIEMOP_DEF(iemOp_sysexit)
5210{
5211 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5213 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
5214 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
5215 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5216}
5217
5218/** Opcode 0x0f 0x37. */
5219FNIEMOP_STUB(iemOp_getsec);
5220
5221
5222/** Opcode 0x0f 0x38. */
5223FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5224{
5225#ifdef IEM_WITH_THREE_0F_38
5226 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
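 /* Four table entries per opcode byte, selected by the mandatory prefix:
 none, 0x66, 0xf3 or 0xf2 (idxPrefix). */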
5227 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5228#else
5229 IEMOP_BITCH_ABOUT_STUB();
5230 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5231#endif
5232}
5233
5234
5235/** Opcode 0x0f 0x3a. */
5236FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5237{
5238#ifdef IEM_WITH_THREE_0F_3A
5239 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5240 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5241#else
5242 IEMOP_BITCH_ABOUT_STUB();
5243 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5244#endif
5245}
5246
5247
5248/**
5249 * Implements a conditional move.
5250 *
5251 * Wish there were an obvious way to do this where we could share and reduce
5252 * code bloat.
5253 *
5254 * @param a_Cnd The conditional "microcode" operation.
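 *
 * @note In 64-bit mode the 32-bit variants always write the destination,
 *       clearing bits 63:32 (the IEM_MC_CLEAR_HIGH_GREG_U64 in the ELSE
 *       branches) even when the condition is false, as the architecture
 *       specifies; the 16-bit and 64-bit variants leave the destination
 *       untouched when the condition is false.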
5255 */
5256#define CMOV_X(a_Cnd) \
5257 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5258 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5259 { \
5260 switch (pVCpu->iem.s.enmEffOpSize) \
5261 { \
5262 case IEMMODE_16BIT: \
5263 IEM_MC_BEGIN(0, 1, 0, 0); \
5264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5265 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5266 a_Cnd { \
5267 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5268 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5269 } IEM_MC_ENDIF(); \
5270 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5271 IEM_MC_END(); \
5272 break; \
5273 \
5274 case IEMMODE_32BIT: \
5275 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0); \
5276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5277 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5278 a_Cnd { \
5279 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5280 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5281 } IEM_MC_ELSE() { \
5282 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5283 } IEM_MC_ENDIF(); \
5284 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5285 IEM_MC_END(); \
5286 break; \
5287 \
5288 case IEMMODE_64BIT: \
5289 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0); \
5290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5291 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5292 a_Cnd { \
5293 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5294 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5295 } IEM_MC_ENDIF(); \
5296 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5297 IEM_MC_END(); \
5298 break; \
5299 \
5300 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5301 } \
5302 } \
5303 else \
5304 { \
5305 switch (pVCpu->iem.s.enmEffOpSize) \
5306 { \
5307 case IEMMODE_16BIT: \
5308 IEM_MC_BEGIN(0, 2, 0, 0); \
5309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5310 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5313 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5314 a_Cnd { \
5315 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5316 } IEM_MC_ENDIF(); \
5317 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5318 IEM_MC_END(); \
5319 break; \
5320 \
5321 case IEMMODE_32BIT: \
5322 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0); \
5323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5324 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5327 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5328 a_Cnd { \
5329 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5330 } IEM_MC_ELSE() { \
5331 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5332 } IEM_MC_ENDIF(); \
5333 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5334 IEM_MC_END(); \
5335 break; \
5336 \
5337 case IEMMODE_64BIT: \
5338 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0); \
5339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5340 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5343 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5344 a_Cnd { \
5345 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5346 } IEM_MC_ENDIF(); \
5347 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5348 IEM_MC_END(); \
5349 break; \
5350 \
5351 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5352 } \
5353 } do {} while (0)
5354
5355
5356
5357/** Opcode 0x0f 0x40. */
5358FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5359{
5360 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5361 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5362}
5363
5364
5365/** Opcode 0x0f 0x41. */
5366FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5367{
5368 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5369 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5370}
5371
5372
5373/** Opcode 0x0f 0x42. */
5374FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5375{
5376 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5377 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5378}
5379
5380
5381/** Opcode 0x0f 0x43. */
5382FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5383{
5384 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5385 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5386}
5387
5388
5389/** Opcode 0x0f 0x44. */
5390FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5391{
5392 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5393 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5394}
5395
5396
5397/** Opcode 0x0f 0x45. */
5398FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5399{
5400 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5401 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5402}
5403
5404
5405/** Opcode 0x0f 0x46. */
5406FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5407{
5408 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5409 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5410}
5411
5412
5413/** Opcode 0x0f 0x47. */
5414FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5415{
5416 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5417 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5418}
5419
5420
5421/** Opcode 0x0f 0x48. */
5422FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5423{
5424 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5425 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5426}
5427
5428
5429/** Opcode 0x0f 0x49. */
5430FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5431{
5432 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5433 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5434}
5435
5436
5437/** Opcode 0x0f 0x4a. */
5438FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5439{
5440 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5441 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5442}
5443
5444
5445/** Opcode 0x0f 0x4b. */
5446FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5447{
5448 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5449 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5450}
5451
5452
5453/** Opcode 0x0f 0x4c. */
5454FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5455{
5456 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5457 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5458}
5459
5460
5461/** Opcode 0x0f 0x4d. */
5462FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5463{
5464 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5465 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5466}
5467
5468
5469/** Opcode 0x0f 0x4e. */
5470FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5471{
5472 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5473 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5474}
5475
5476
5477/** Opcode 0x0f 0x4f. */
5478FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5479{
5480 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5481 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5482}
5483
5484#undef CMOV_X
5485
5486/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5487FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5488{
5489 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5491 if (IEM_IS_MODRM_REG_MODE(bRm))
5492 {
5493 /*
5494 * Register, register.
5495 */
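 /* The worker gathers the sign bit of each of the four packed singles
 into bits 3:0 of u8Dst, which is then stored zero-extended in the GPR. */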
5496 IEM_MC_BEGIN(2, 1, 0, 0);
5497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5498 IEM_MC_LOCAL(uint8_t, u8Dst);
5499 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5500 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5501 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5502 IEM_MC_PREPARE_SSE_USAGE();
5503 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5504 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5505 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5506 IEM_MC_ADVANCE_RIP_AND_FINISH();
5507 IEM_MC_END();
5508 }
5509 /* No memory operand. */
5510 else
5511 IEMOP_RAISE_INVALID_OPCODE_RET();
5512}
5513
5514
5515/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5516FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5517{
5518 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5520 if (IEM_IS_MODRM_REG_MODE(bRm))
5521 {
5522 /*
5523 * Register, register.
5524 */
5525 IEM_MC_BEGIN(2, 1, 0, 0);
5526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5527 IEM_MC_LOCAL(uint8_t, u8Dst);
5528 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5529 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5530 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5531 IEM_MC_PREPARE_SSE_USAGE();
5532 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5533 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5534 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5535 IEM_MC_ADVANCE_RIP_AND_FINISH();
5536 IEM_MC_END();
5537 }
5538 /* No memory operand. */
5539 else
5540 IEMOP_RAISE_INVALID_OPCODE_RET();
5542}
5543
5544
5545/* Opcode 0xf3 0x0f 0x50 - invalid */
5546/* Opcode 0xf2 0x0f 0x50 - invalid */
5547
5548
5549/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5550FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5551{
5552 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5553 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5554}
5555
5556
5557/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5558FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5559{
5560 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5561 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5562}
5563
5564
5565/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5566FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5567{
5568 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5569 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5570}
5571
5572
5573/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5574FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5575{
5576 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5577 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5578}
5579
5580
5581/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5582FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5583{
5584 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5585 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5586}
5587
5588
5589/* Opcode 0x66 0x0f 0x52 - invalid */
5590
5591
5592/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5593FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5594{
5595 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5596 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5597}
5598
5599
5600/* Opcode 0xf2 0x0f 0x52 - invalid */
5601
5602/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5603FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5604/* Opcode 0x66 0x0f 0x53 - invalid */
5605/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5606FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5607/* Opcode 0xf2 0x0f 0x53 - invalid */
5608
5609
5610/** Opcode 0x0f 0x54 - andps Vps, Wps */
5611FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5612{
5613 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5614 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5615}
5616
5617
5618/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5619FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5620{
5621 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5622 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5623}
5624
5625
5626/* Opcode 0xf3 0x0f 0x54 - invalid */
5627/* Opcode 0xf2 0x0f 0x54 - invalid */
5628
5629
5630/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5631FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5632{
5633 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5634 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5635}
5636
5637
5638/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5639FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5640{
5641 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5642 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5643}
5644
5645
5646/* Opcode 0xf3 0x0f 0x55 - invalid */
5647/* Opcode 0xf2 0x0f 0x55 - invalid */
5648
5649
5650/** Opcode 0x0f 0x56 - orps Vps, Wps */
5651FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5652{
5653 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5654 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5655}
5656
5657
5658/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5659FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5660{
5661 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5662 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5663}
5664
5665
5666/* Opcode 0xf3 0x0f 0x56 - invalid */
5667/* Opcode 0xf2 0x0f 0x56 - invalid */
5668
5669
5670/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5671FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5672{
5673 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5674 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5675}
5676
5677
5678/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5679FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5680{
5681 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5682 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5683}
5684
5685
5686/* Opcode 0xf3 0x0f 0x57 - invalid */
5687/* Opcode 0xf2 0x0f 0x57 - invalid */
5688
5689/** Opcode 0x0f 0x58 - addps Vps, Wps */
5690FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5691{
5692 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5693 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5694}
5695
5696
5697/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5698FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5699{
5700 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5701 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5702}
5703
5704
5705/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5706FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5707{
5708 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5709 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5710}
5711
5712
5713/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5714FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5715{
5716 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5717 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5718}
5719
5720
5721/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5722FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5723{
5724 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5725 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5726}
5727
5728
5729/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5730FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5731{
5732 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5733 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5734}
5735
5736
5737/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5738FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5739{
5740 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5741 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5742}
5743
5744
5745/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5746FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5747{
5748 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5749 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5750}
5751
5752
5753/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5754FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5755{
5756 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5757 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5758}
5759
5760
5761/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5762FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5763{
5764 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5765 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5766}
5767
5768
5769/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5770FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5771{
5772 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5773 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5774}
5775
5776
5777/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5778FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5779{
5780 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5781 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5782}
5783
5784
5785/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5786FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5787{
5788 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5789 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5790}
5791
5792
5793/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5794FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5795{
5796 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5797 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5798}
5799
5800
5801/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5802FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5803{
5804 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5805 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5806}
5807
5808
5809/* Opcode 0xf2 0x0f 0x5b - invalid */
5810
5811
5812/** Opcode 0x0f 0x5c - subps Vps, Wps */
5813FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5814{
5815 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5816 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5817}
5818
5819
5820/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5821FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5822{
5823 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5824 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5825}
5826
5827
5828/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5829FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5830{
5831 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5832 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5833}
5834
5835
5836/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5837FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5838{
5839 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5840 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5841}
5842
5843
5844/** Opcode 0x0f 0x5d - minps Vps, Wps */
5845FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5846{
5847 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5848 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5849}
5850
5851
5852/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5853FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5854{
5855 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5856 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5857}
5858
5859
5860/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5861FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5862{
5863 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5864 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5865}
5866
5867
5868/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5869FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5870{
5871 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5872 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5873}
5874
5875
5876/** Opcode 0x0f 0x5e - divps Vps, Wps */
5877FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5878{
5879 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5880 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5881}
5882
5883
5884/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5885FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5886{
5887 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5888 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5889}
5890
5891
5892/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5893FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5894{
5895 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5896 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5897}
5898
5899
5900/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5901FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5902{
5903 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5904 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5905}
5906
5907
5908/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5909FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5910{
5911 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5912 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5913}
5914
5915
5916/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5917FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5918{
5919 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5920 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5921}
5922
5923
5924/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5925FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5926{
5927 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5928 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5929}
5930
5931
5932/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5933FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5934{
5935 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5936 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5937}
5938
5939
5940/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5941FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5942{
5943 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5944 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5945}
5946
5947
5948/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5949FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5950{
5951 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5952 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5953}
5954
5955
5956/* Opcode 0xf3 0x0f 0x60 - invalid */
5957
5958
5959/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5960FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5961{
5962 /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
5963 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5964 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5965}
5966
5967
5968/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5969FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5970{
5971 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5972 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5973}
5974
5975
5976/* Opcode 0xf3 0x0f 0x61 - invalid */
5977
5978
5979/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5980FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5981{
5982 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5983 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5984}
5985
5986
5987/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5988FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5989{
5990 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5991 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5992}
5993
5994
5995/* Opcode 0xf3 0x0f 0x62 - invalid */
5996
5997
5998
5999/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
6000FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
6001{
6002 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6003 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
6004}
6005
6006
6007/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
6008FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
6009{
6010 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6011 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
6012}
6013
6014
6015/* Opcode 0xf3 0x0f 0x63 - invalid */
6016
6017
6018/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
6019FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
6020{
6021 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6022 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
6023}
6024
6025
6026/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
6027FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
6028{
6029 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6030 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
6031}
6032
6033
6034/* Opcode 0xf3 0x0f 0x64 - invalid */
6035
6036
6037/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6038FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6039{
6040 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6041 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6042}
6043
6044
6045/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6046FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6047{
6048 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6049 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6050}
6051
6052
6053/* Opcode 0xf3 0x0f 0x65 - invalid */
6054
6055
6056/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6057FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6058{
6059 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6060 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6061}
6062
6063
6064/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6065FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6066{
6067 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6068 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6069}
6070
6071
6072/* Opcode 0xf3 0x0f 0x66 - invalid */
6073
6074
6075/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6076FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6077{
6078 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6079 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6080}
6081
6082
6083/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6084FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6085{
6086 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6087 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6088}
6089
6090
6091/* Opcode 0xf3 0x0f 0x67 - invalid */
6092
6093
6094/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6095 * @note Intel and AMD both use Qd for the second parameter; however, they
6096 * both list it as an mmX/mem64 operand and Intel describes it as being
6097 * loaded as a qword, so it should be Qq, shouldn't it? */
6098FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6099{
6100 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6101 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6102}
6103
6104
6105/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6106FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6107{
6108 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6109 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6110}
6111
6112
6113/* Opcode 0xf3 0x0f 0x68 - invalid */
6114
6115
6116/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6117 * @note Intel and AMD both use Qd for the second parameter; however, they
6118 * both list it as an mmX/mem64 operand and Intel describes it as being
6119 * loaded as a qword, so it should be Qq, shouldn't it? */
6120FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6121{
6122 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6123 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6124}
6125
6126
6127/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6128FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6129{
6130 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6131 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6133}
6134
6135
6136/* Opcode 0xf3 0x0f 0x69 - invalid */
6137
6138
6139/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6140 * @note Intel and AMD both use Qd for the second parameter; however, they
6141 * both list it as an mmX/mem64 operand and Intel describes it as being
6142 * loaded as a qword, so it should be Qq, shouldn't it? */
6143FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6144{
6145 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6146 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6147}
6148
6149
6150/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6151FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6152{
6153 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6154 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6155}
6156
6157
6158/* Opcode 0xf3 0x0f 0x6a - invalid */
6159
6160
6161/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6162FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6163{
6164 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6165 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6166}
6167
6168
6169/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6170FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6171{
6172 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6173 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6174}
6175
6176
6177/* Opcode 0xf3 0x0f 0x6b - invalid */
6178
6179
6180/* Opcode 0x0f 0x6c - invalid */
6181
6182
6183/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6184FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6185{
6186 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6187 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6188}
6189
6190
6191/* Opcode 0xf3 0x0f 0x6c - invalid */
6192/* Opcode 0xf2 0x0f 0x6c - invalid */
6193
6194
6195/* Opcode 0x0f 0x6d - invalid */
6196
6197
6198/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6199FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6200{
6201 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6202 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6203}
6204
6205
6206/* Opcode 0xf3 0x0f 0x6d - invalid */
6207
6208
6209FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6210{
6211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
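 /* REX.W selects the 64-bit MOVQ form below; without it this is the 32-bit
 MOVD form, zero-extended into the 64-bit MMX register. */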
6212 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6213 {
6214 /**
6215 * @opcode 0x6e
6216 * @opcodesub rex.w=1
6217 * @oppfx none
6218 * @opcpuid mmx
6219 * @opgroup og_mmx_datamove
6220 * @opxcpttype 5
6221 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6222 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6223 */
6224 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6225 if (IEM_IS_MODRM_REG_MODE(bRm))
6226 {
6227 /* MMX, greg64 */
6228 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6230 IEM_MC_LOCAL(uint64_t, u64Tmp);
6231
6232 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6233 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6234 IEM_MC_FPU_TO_MMX_MODE();
6235
6236 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6237 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6238
6239 IEM_MC_ADVANCE_RIP_AND_FINISH();
6240 IEM_MC_END();
6241 }
6242 else
6243 {
6244 /* MMX, [mem64] */
6245 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6247 IEM_MC_LOCAL(uint64_t, u64Tmp);
6248
6249 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6251 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6252 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6253 IEM_MC_FPU_TO_MMX_MODE();
6254
6255 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6256 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6257
6258 IEM_MC_ADVANCE_RIP_AND_FINISH();
6259 IEM_MC_END();
6260 }
6261 }
6262 else
6263 {
6264 /**
6265 * @opdone
6266 * @opcode 0x6e
6267 * @opcodesub rex.w=0
6268 * @oppfx none
6269 * @opcpuid mmx
6270 * @opgroup og_mmx_datamove
6271 * @opxcpttype 5
6272 * @opfunction iemOp_movd_q_Pd_Ey
6273 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6274 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6275 */
6276 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6277 if (IEM_IS_MODRM_REG_MODE(bRm))
6278 {
6279 /* MMX, greg32 */
6280 IEM_MC_BEGIN(0, 1, 0, 0);
6281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6282 IEM_MC_LOCAL(uint32_t, u32Tmp);
6283
6284 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6285 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6286 IEM_MC_FPU_TO_MMX_MODE();
6287
6288 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6289 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6290
6291 IEM_MC_ADVANCE_RIP_AND_FINISH();
6292 IEM_MC_END();
6293 }
6294 else
6295 {
6296 /* MMX, [mem32] */
6297 IEM_MC_BEGIN(0, 2, 0, 0);
6298 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6299 IEM_MC_LOCAL(uint32_t, u32Tmp);
6300
6301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6303 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6304 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6305 IEM_MC_FPU_TO_MMX_MODE();
6306
6307 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6308 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6309
6310 IEM_MC_ADVANCE_RIP_AND_FINISH();
6311 IEM_MC_END();
6312 }
6313 }
6314}
6315
6316FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6317{
6318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6319 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6320 {
6321 /**
6322 * @opcode 0x6e
6323 * @opcodesub rex.w=1
6324 * @oppfx 0x66
6325 * @opcpuid sse2
6326 * @opgroup og_sse2_simdint_datamove
6327 * @opxcpttype 5
6328 * @optest 64-bit / op1=1 op2=2 -> op1=2
6329 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6330 */
6331 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6332 if (IEM_IS_MODRM_REG_MODE(bRm))
6333 {
6334 /* XMM, greg64 */
6335 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6337 IEM_MC_LOCAL(uint64_t, u64Tmp);
6338
6339 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6340 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6341
6342 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6343 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6344
6345 IEM_MC_ADVANCE_RIP_AND_FINISH();
6346 IEM_MC_END();
6347 }
6348 else
6349 {
6350 /* XMM, [mem64] */
6351 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6353 IEM_MC_LOCAL(uint64_t, u64Tmp);
6354
6355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6357 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6358 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6359
6360 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6361 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6362
6363 IEM_MC_ADVANCE_RIP_AND_FINISH();
6364 IEM_MC_END();
6365 }
6366 }
6367 else
6368 {
6369 /**
6370 * @opdone
6371 * @opcode 0x6e
6372 * @opcodesub rex.w=0
6373 * @oppfx 0x66
6374 * @opcpuid sse2
6375 * @opgroup og_sse2_simdint_datamove
6376 * @opxcpttype 5
6377 * @opfunction iemOp_movd_q_Vy_Ey
6378 * @optest op1=1 op2=2 -> op1=2
6379 * @optest op1=0 op2=-42 -> op1=-42
6380 */
6381 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6382 if (IEM_IS_MODRM_REG_MODE(bRm))
6383 {
6384 /* XMM, greg32 */
6385 IEM_MC_BEGIN(0, 1, 0, 0);
6386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6387 IEM_MC_LOCAL(uint32_t, u32Tmp);
6388
6389 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6390 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6391
6392 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6393 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6394
6395 IEM_MC_ADVANCE_RIP_AND_FINISH();
6396 IEM_MC_END();
6397 }
6398 else
6399 {
6400 /* XMM, [mem32] */
6401 IEM_MC_BEGIN(0, 2, 0, 0);
6402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6403 IEM_MC_LOCAL(uint32_t, u32Tmp);
6404
6405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6407 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6408 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6409
6410 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6411 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6412
6413 IEM_MC_ADVANCE_RIP_AND_FINISH();
6414 IEM_MC_END();
6415 }
6416 }
6417}
6418
6419/* Opcode 0xf3 0x0f 0x6e - invalid */
6420
6421
6422/**
6423 * @opcode 0x6f
6424 * @oppfx none
6425 * @opcpuid mmx
6426 * @opgroup og_mmx_datamove
6427 * @opxcpttype 5
6428 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6429 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6430 */
6431FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6432{
6433 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6434 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6435 if (IEM_IS_MODRM_REG_MODE(bRm))
6436 {
6437 /*
6438 * Register, register.
6439 */
6440 IEM_MC_BEGIN(0, 1, 0, 0);
6441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6442 IEM_MC_LOCAL(uint64_t, u64Tmp);
6443
6444 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6445 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6446 IEM_MC_FPU_TO_MMX_MODE();
6447
6448 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6449 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6450
6451 IEM_MC_ADVANCE_RIP_AND_FINISH();
6452 IEM_MC_END();
6453 }
6454 else
6455 {
6456 /*
6457 * Register, memory.
6458 */
6459 IEM_MC_BEGIN(0, 2, 0, 0);
6460 IEM_MC_LOCAL(uint64_t, u64Tmp);
6461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6462
6463 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6465 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6466 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6467 IEM_MC_FPU_TO_MMX_MODE();
6468
6469 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6470 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6471
6472 IEM_MC_ADVANCE_RIP_AND_FINISH();
6473 IEM_MC_END();
6474 }
6475}
6476
6477/**
6478 * @opcode 0x6f
6479 * @oppfx 0x66
6480 * @opcpuid sse2
6481 * @opgroup og_sse2_simdint_datamove
6482 * @opxcpttype 1
6483 * @optest op1=1 op2=2 -> op1=2
6484 * @optest op1=0 op2=-42 -> op1=-42
6485 */
6486FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6487{
6488 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6490 if (IEM_IS_MODRM_REG_MODE(bRm))
6491 {
6492 /*
6493 * Register, register.
6494 */
6495 IEM_MC_BEGIN(0, 0, 0, 0);
6496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6497
6498 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6499 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6500
6501 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6502 IEM_GET_MODRM_RM(pVCpu, bRm));
6503 IEM_MC_ADVANCE_RIP_AND_FINISH();
6504 IEM_MC_END();
6505 }
6506 else
6507 {
6508 /*
6509 * Register, memory.
6510 */
6511 IEM_MC_BEGIN(0, 2, 0, 0);
6512 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6514
6515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6517 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6518 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6519
6520 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6521 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6522
6523 IEM_MC_ADVANCE_RIP_AND_FINISH();
6524 IEM_MC_END();
6525 }
6526}
6527
6528/**
6529 * @opcode 0x6f
6530 * @oppfx 0xf3
6531 * @opcpuid sse2
6532 * @opgroup og_sse2_simdint_datamove
6533 * @opxcpttype 4UA
6534 * @optest op1=1 op2=2 -> op1=2
6535 * @optest op1=0 op2=-42 -> op1=-42
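 * @note Unlike movdqa above, the memory form fetches with
 *       IEM_MC_FETCH_MEM_U128 and thus skips the 16-byte alignment
 *       check, so a misaligned operand does not raise \#GP.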
6536 */
6537FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6538{
6539 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6541 if (IEM_IS_MODRM_REG_MODE(bRm))
6542 {
6543 /*
6544 * Register, register.
6545 */
6546 IEM_MC_BEGIN(0, 0, 0, 0);
6547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6548 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6549 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6550 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6551 IEM_GET_MODRM_RM(pVCpu, bRm));
6552 IEM_MC_ADVANCE_RIP_AND_FINISH();
6553 IEM_MC_END();
6554 }
6555 else
6556 {
6557 /*
6558 * Register, memory.
6559 */
6560 IEM_MC_BEGIN(0, 2, 0, 0);
6561 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6563
6564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6566 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6567 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6568 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6569 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6570
6571 IEM_MC_ADVANCE_RIP_AND_FINISH();
6572 IEM_MC_END();
6573 }
6574}
6575
6576
6577/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6578FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6579{
6580 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6582 if (IEM_IS_MODRM_REG_MODE(bRm))
6583 {
6584 /*
6585 * Register, register.
6586 */
6587 IEM_MC_BEGIN(3, 0, 0, 0);
6588 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6590 IEM_MC_ARG(uint64_t *, pDst, 0);
6591 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6592 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6593 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6594 IEM_MC_PREPARE_FPU_USAGE();
6595 IEM_MC_FPU_TO_MMX_MODE();
6596
6597 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6598 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6599 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6600 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6601
6602 IEM_MC_ADVANCE_RIP_AND_FINISH();
6603 IEM_MC_END();
6604 }
6605 else
6606 {
6607 /*
6608 * Register, memory.
6609 */
6610 IEM_MC_BEGIN(3, 2, 0, 0);
6611 IEM_MC_ARG(uint64_t *, pDst, 0);
6612 IEM_MC_LOCAL(uint64_t, uSrc);
6613 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6615
6616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6617 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6618 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6620 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6621 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6622
6623 IEM_MC_PREPARE_FPU_USAGE();
6624 IEM_MC_FPU_TO_MMX_MODE();
6625
6626 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6627 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6628 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6629
6630 IEM_MC_ADVANCE_RIP_AND_FINISH();
6631 IEM_MC_END();
6632 }
6633}
6634
6635
6636/**
6637 * Common worker for SSE2 instructions on the forms:
6638 * pshufd xmm1, xmm2/mem128, imm8
6639 * pshufhw xmm1, xmm2/mem128, imm8
6640 * pshuflw xmm1, xmm2/mem128, imm8
6641 *
6642 * Proper alignment of the 128-bit operand is enforced.
6643 * Exceptions type 4. SSE2 cpuid checks.
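 *
 * E.g. for pshufd an imm8 of 0x1B reverses the dword order: destination
 * dword i is the source dword selected by imm8 bits [2*i+1:2*i].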
6644 */
6645FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6646{
6647 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6648 if (IEM_IS_MODRM_REG_MODE(bRm))
6649 {
6650 /*
6651 * Register, register.
6652 */
6653 IEM_MC_BEGIN(3, 0, 0, 0);
6654 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6656 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6657 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6658 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6659 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6660 IEM_MC_PREPARE_SSE_USAGE();
6661 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6662 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6663 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6664 IEM_MC_ADVANCE_RIP_AND_FINISH();
6665 IEM_MC_END();
6666 }
6667 else
6668 {
6669 /*
6670 * Register, memory.
6671 */
6672 IEM_MC_BEGIN(3, 2, 0, 0);
6673 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6674 IEM_MC_LOCAL(RTUINT128U, uSrc);
6675 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6676 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6677
6678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6679 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6680 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6682 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6683
6684 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6685 IEM_MC_PREPARE_SSE_USAGE();
6686 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6687 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6688
6689 IEM_MC_ADVANCE_RIP_AND_FINISH();
6690 IEM_MC_END();
6691 }
6692}
6693
6694
6695/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6696FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6697{
6698 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6699 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6700}
6701
6702
6703/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6704FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6705{
6706 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6707 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6708}
6709
6710
6711/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6712FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6713{
6714 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6715 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6716}
6717
6718
6719/**
6720 * Common worker for MMX instructions of the form:
6721 * psrlw mm, imm8
6722 * psraw mm, imm8
6723 * psllw mm, imm8
6724 * psrld mm, imm8
6725 * psrad mm, imm8
6726 * pslld mm, imm8
6727 * psrlq mm, imm8
6728 * psllq mm, imm8
6729 *
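 * @note Only the register encoding can get here; the group 12/13/14
 *       decoders send the memory forms to iemOp_InvalidWithRMNeedImm8,
 *       hence the assertion in the else branch below.
 *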
6730 */
6731FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6732{
6733 if (IEM_IS_MODRM_REG_MODE(bRm))
6734 {
6735 /*
6736 * Register, immediate.
6737 */
6738 IEM_MC_BEGIN(2, 0, 0, 0);
6739 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6741 IEM_MC_ARG(uint64_t *, pDst, 0);
6742 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6743 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6744 IEM_MC_PREPARE_FPU_USAGE();
6745 IEM_MC_FPU_TO_MMX_MODE();
6746
6747 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6748 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6749 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6750
6751 IEM_MC_ADVANCE_RIP_AND_FINISH();
6752 IEM_MC_END();
6753 }
6754 else
6755 {
6756 /*
6757 * Register, memory not supported.
6758 */
6759 /// @todo Caller already enforced register mode?!
6760 AssertFailedReturn(VINF_SUCCESS);
6761 }
6762}
6763
6764
6765/**
6766 * Common worker for SSE2 instructions of the form:
6767 * psrlw xmm, imm8
6768 * psraw xmm, imm8
6769 * psllw xmm, imm8
6770 * psrld xmm, imm8
6771 * psrad xmm, imm8
6772 * pslld xmm, imm8
6773 * psrlq xmm, imm8
6774 * psllq xmm, imm8
6775 *
6776 */
6777FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6778{
6779 if (IEM_IS_MODRM_REG_MODE(bRm))
6780 {
6781 /*
6782 * Register, immediate.
6783 */
6784 IEM_MC_BEGIN(2, 0, 0, 0);
6785 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6787 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6788 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6789 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6790 IEM_MC_PREPARE_SSE_USAGE();
6791 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6792 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6793 IEM_MC_ADVANCE_RIP_AND_FINISH();
6794 IEM_MC_END();
6795 }
6796 else
6797 {
6798 /*
6799 * Register, memory.
6800 */
6801 /// @todo Caller already enforced register mode?!
6802 AssertFailedReturn(VINF_SUCCESS);
6803 }
6804}
6805
6806
6807/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6808FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6809{
6810// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6811 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6812}
6813
6814
6815/** Opcode 0x66 0x0f 0x71 11/2. */
6816FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6817{
6818// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6819 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6820}
6821
6822
6823/** Opcode 0x0f 0x71 11/4. */
6824FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6825{
6826// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6827 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6828}
6829
6830
6831/** Opcode 0x66 0x0f 0x71 11/4. */
6832FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6833{
6834// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6835 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6836}
6837
6838
6839/** Opcode 0x0f 0x71 11/6. */
6840FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6841{
6842// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6843 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6844}
6845
6846
6847/** Opcode 0x66 0x0f 0x71 11/6. */
6848FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6849{
6850// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6851 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6852}
6853
6854
6855/**
6856 * Group 12 jump table for register variant.
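 * Indexed by the ModRM reg field times four plus the mandatory prefix
 * index (none, 0x66, 0xf3, 0xf2), matching the lookup in iemOp_Grp12.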
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);


/** Opcode 0x0f 0x71. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/** Opcode 0x0f 0x72 11/2. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
}


/** Opcode 0x0f 0x72 11/4. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
}


/** Opcode 0x0f 0x72 11/6. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
}


/**
 * Group 13 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);


/** Opcode 0x0f 0x72. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/** Opcode 0x0f 0x73 11/2. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
}


/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
}


/** Opcode 0x0f 0x73 11/6. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
}


/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
}


/**
 * Group 14 jump table for register variant.
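 * Note: /3 (psrldq) and /7 (pslldq) only exist with the 0x66 prefix.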
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);


/** Opcode 0x0f 0x73. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}


/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}


/* Opcode 0xf3 0x0f 0x74 - invalid */
/* Opcode 0xf2 0x0f 0x74 - invalid */


/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}


/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
}


/* Opcode 0xf3 0x0f 0x75 - invalid */
/* Opcode 0xf2 0x0f 0x75 - invalid */


/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
}


/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
}


/* Opcode 0xf3 0x0f 0x76 - invalid */
/* Opcode 0xf2 0x0f 0x76 - invalid */


/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_DEF(iemOp_emms)
{
    IEMOP_MNEMONIC(emms, "emms");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
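    /* emms tags all x87 registers empty again; IEM_MC_FPU_FROM_MMX_MODE() below takes care of that. */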
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}

/* Opcode 0x66 0x0f 0x77 - invalid */
/* Opcode 0xf3 0x0f 0x77 - invalid */
/* Opcode 0xf2 0x0f 0x77 - invalid */

/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
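            /* The CImpl worker writes the destination register itself, so any
               guest register shadow the native recompiler keeps is flushed first. */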
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg32, pu64Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
#endif

/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
#endif
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */

/* Opcode 0x0f 0x7a - invalid */
/* Opcode 0x66 0x0f 0x7a - invalid */
/* Opcode 0xf3 0x0f 0x7a - invalid */
/* Opcode 0xf2 0x0f 0x7a - invalid */

/* Opcode 0x0f 0x7b - invalid */
/* Opcode 0x66 0x0f 0x7b - invalid */
/* Opcode 0xf3 0x0f 0x7b - invalid */
/* Opcode 0xf2 0x0f 0x7b - invalid */

/* Opcode 0x0f 0x7c - invalid */


/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
}


/* Opcode 0xf3 0x0f 0x7c - invalid */


/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}


/* Opcode 0x0f 0x7d - invalid */


/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
}


/* Opcode 0xf3 0x0f 0x7d - invalid */


/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
}


/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Ey_Pd
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX */
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}

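/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */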
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Ey_Vy
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * @opcode      0x7e
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  none
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Vq_Wq)
{
    IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM64.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
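        /* The register store zero-extends, clearing bits 127:64 of the destination. */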
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem64].
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/* Opcode 0xf2 0x0f 0x7e - invalid */


/** Opcode 0x0f 0x7f - movq Qq, Pq */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], MMX.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
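        /* Unlike movdqa above, this store is not subject to the 16-byte alignment check. */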
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/* Opcode 0xf2 0x0f 0x7f - invalid */



/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
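    /* jo: taken when OF is set. The displacement is Jw with 16-bit operand size, otherwise Jd. */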
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
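    /* jbe/jna: taken when CF or ZF is set (unsigned below or equal). */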
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8b. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
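    /* jl/jnge: taken when SF != OF (signed less). */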
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
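    /* jle/jng: taken when ZF is set or SF != OF (signed less or equal). */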
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
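    /* The destination byte is always written: 1 if the condition holds, 0 otherwise. */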
8267 if (IEM_IS_MODRM_REG_MODE(bRm))
8268 {
8269 /* register target */
8270 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8272 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8273 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8274 } IEM_MC_ELSE() {
8275 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8276 } IEM_MC_ENDIF();
8277 IEM_MC_ADVANCE_RIP_AND_FINISH();
8278 IEM_MC_END();
8279 }
8280 else
8281 {
8282 /* memory target */
8283 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8287 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8288 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8289 } IEM_MC_ELSE() {
8290 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8291 } IEM_MC_ENDIF();
8292 IEM_MC_ADVANCE_RIP_AND_FINISH();
8293 IEM_MC_END();
8294 }
8295}
8296
8297
8298/** Opcode 0x0f 0x91. */
8299FNIEMOP_DEF(iemOp_setno_Eb)
8300{
8301 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8302 IEMOP_HLP_MIN_386();
8303 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8304
8305 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8306 * any way. AMD says it's "unused", whatever that means. We're
8307 * ignoring for now. */
8308 if (IEM_IS_MODRM_REG_MODE(bRm))
8309 {
8310 /* register target */
8311 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8313 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8314 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8315 } IEM_MC_ELSE() {
8316 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8317 } IEM_MC_ENDIF();
8318 IEM_MC_ADVANCE_RIP_AND_FINISH();
8319 IEM_MC_END();
8320 }
8321 else
8322 {
8323 /* memory target */
8324 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8328 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8329 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8330 } IEM_MC_ELSE() {
8331 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8332 } IEM_MC_ENDIF();
8333 IEM_MC_ADVANCE_RIP_AND_FINISH();
8334 IEM_MC_END();
8335 }
8336}
8337
8338
8339/** Opcode 0x0f 0x92. */
8340FNIEMOP_DEF(iemOp_setc_Eb)
8341{
8342 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8343 IEMOP_HLP_MIN_386();
8344 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8345
8346 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8347 * any way. AMD says it's "unused", whatever that means. We're
8348 * ignoring for now. */
8349 if (IEM_IS_MODRM_REG_MODE(bRm))
8350 {
8351 /* register target */
8352 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8354 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8355 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8356 } IEM_MC_ELSE() {
8357 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8358 } IEM_MC_ENDIF();
8359 IEM_MC_ADVANCE_RIP_AND_FINISH();
8360 IEM_MC_END();
8361 }
8362 else
8363 {
8364 /* memory target */
8365 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8369 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8370 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8371 } IEM_MC_ELSE() {
8372 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8373 } IEM_MC_ENDIF();
8374 IEM_MC_ADVANCE_RIP_AND_FINISH();
8375 IEM_MC_END();
8376 }
8377}
8378
8379
8380/** Opcode 0x0f 0x93. */
8381FNIEMOP_DEF(iemOp_setnc_Eb)
8382{
8383 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8384 IEMOP_HLP_MIN_386();
8385 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8386
8387 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8388 * any way. AMD says it's "unused", whatever that means. We're
8389 * ignoring for now. */
8390 if (IEM_IS_MODRM_REG_MODE(bRm))
8391 {
8392 /* register target */
8393 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8395 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8396 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8397 } IEM_MC_ELSE() {
8398 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8399 } IEM_MC_ENDIF();
8400 IEM_MC_ADVANCE_RIP_AND_FINISH();
8401 IEM_MC_END();
8402 }
8403 else
8404 {
8405 /* memory target */
8406 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8410 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8411 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8412 } IEM_MC_ELSE() {
8413 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8414 } IEM_MC_ENDIF();
8415 IEM_MC_ADVANCE_RIP_AND_FINISH();
8416 IEM_MC_END();
8417 }
8418}
8419
8420
8421/** Opcode 0x0f 0x94. */
8422FNIEMOP_DEF(iemOp_sete_Eb)
8423{
8424 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8425 IEMOP_HLP_MIN_386();
8426 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8427
8428 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8429 * any way. AMD says it's "unused", whatever that means. We're
8430 * ignoring for now. */
8431 if (IEM_IS_MODRM_REG_MODE(bRm))
8432 {
8433 /* register target */
8434 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8436 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8437 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8438 } IEM_MC_ELSE() {
8439 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8440 } IEM_MC_ENDIF();
8441 IEM_MC_ADVANCE_RIP_AND_FINISH();
8442 IEM_MC_END();
8443 }
8444 else
8445 {
8446 /* memory target */
8447 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8451 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8452 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8453 } IEM_MC_ELSE() {
8454 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8455 } IEM_MC_ENDIF();
8456 IEM_MC_ADVANCE_RIP_AND_FINISH();
8457 IEM_MC_END();
8458 }
8459}
8460
8461
8462/** Opcode 0x0f 0x95. */
8463FNIEMOP_DEF(iemOp_setne_Eb)
8464{
8465 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8466 IEMOP_HLP_MIN_386();
8467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8468
8469 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8470 * any way. AMD says it's "unused", whatever that means. We're
8471 * ignoring for now. */
8472 if (IEM_IS_MODRM_REG_MODE(bRm))
8473 {
8474 /* register target */
8475 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8477 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8478 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8479 } IEM_MC_ELSE() {
8480 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8481 } IEM_MC_ENDIF();
8482 IEM_MC_ADVANCE_RIP_AND_FINISH();
8483 IEM_MC_END();
8484 }
8485 else
8486 {
8487 /* memory target */
8488 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8492 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8493 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8494 } IEM_MC_ELSE() {
8495 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8496 } IEM_MC_ENDIF();
8497 IEM_MC_ADVANCE_RIP_AND_FINISH();
8498 IEM_MC_END();
8499 }
8500}
8501
8502
8503/** Opcode 0x0f 0x96. */
8504FNIEMOP_DEF(iemOp_setbe_Eb)
8505{
8506 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8507 IEMOP_HLP_MIN_386();
8508 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8509
8510 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8511 * any way. AMD says it's "unused", whatever that means. We're
8512 * ignoring for now. */
8513 if (IEM_IS_MODRM_REG_MODE(bRm))
8514 {
8515 /* register target */
8516 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8518 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8519 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8520 } IEM_MC_ELSE() {
8521 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8522 } IEM_MC_ENDIF();
8523 IEM_MC_ADVANCE_RIP_AND_FINISH();
8524 IEM_MC_END();
8525 }
8526 else
8527 {
8528 /* memory target */
8529 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8533 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8534 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8535 } IEM_MC_ELSE() {
8536 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8537 } IEM_MC_ENDIF();
8538 IEM_MC_ADVANCE_RIP_AND_FINISH();
8539 IEM_MC_END();
8540 }
8541}
8542
8543
8544/** Opcode 0x0f 0x97. */
8545FNIEMOP_DEF(iemOp_setnbe_Eb)
8546{
8547 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8548 IEMOP_HLP_MIN_386();
8549 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8550
8551 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8552 * any way. AMD says it's "unused", whatever that means. We're
8553 * ignoring for now. */
8554 if (IEM_IS_MODRM_REG_MODE(bRm))
8555 {
8556 /* register target */
8557 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8559 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8560 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8561 } IEM_MC_ELSE() {
8562 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8563 } IEM_MC_ENDIF();
8564 IEM_MC_ADVANCE_RIP_AND_FINISH();
8565 IEM_MC_END();
8566 }
8567 else
8568 {
8569 /* memory target */
8570 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8574 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8575 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8576 } IEM_MC_ELSE() {
8577 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8578 } IEM_MC_ENDIF();
8579 IEM_MC_ADVANCE_RIP_AND_FINISH();
8580 IEM_MC_END();
8581 }
8582}
8583
8584
8585/** Opcode 0x0f 0x98. */
8586FNIEMOP_DEF(iemOp_sets_Eb)
8587{
8588 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8589 IEMOP_HLP_MIN_386();
8590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8591
8592 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8593 * any way. AMD says it's "unused", whatever that means. We're
8594 * ignoring for now. */
8595 if (IEM_IS_MODRM_REG_MODE(bRm))
8596 {
8597 /* register target */
8598 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8600 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8601 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8602 } IEM_MC_ELSE() {
8603 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8604 } IEM_MC_ENDIF();
8605 IEM_MC_ADVANCE_RIP_AND_FINISH();
8606 IEM_MC_END();
8607 }
8608 else
8609 {
8610 /* memory target */
8611 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8612 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8613 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8615 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8616 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8617 } IEM_MC_ELSE() {
8618 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8619 } IEM_MC_ENDIF();
8620 IEM_MC_ADVANCE_RIP_AND_FINISH();
8621 IEM_MC_END();
8622 }
8623}
8624
8625
8626/** Opcode 0x0f 0x99. */
8627FNIEMOP_DEF(iemOp_setns_Eb)
8628{
8629 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8630 IEMOP_HLP_MIN_386();
8631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8632
8633 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8634 * any way. AMD says it's "unused", whatever that means. We're
8635 * ignoring for now. */
8636 if (IEM_IS_MODRM_REG_MODE(bRm))
8637 {
8638 /* register target */
8639 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8641 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8642 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8643 } IEM_MC_ELSE() {
8644 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8645 } IEM_MC_ENDIF();
8646 IEM_MC_ADVANCE_RIP_AND_FINISH();
8647 IEM_MC_END();
8648 }
8649 else
8650 {
8651 /* memory target */
8652 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8656 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8657 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8658 } IEM_MC_ELSE() {
8659 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8660 } IEM_MC_ENDIF();
8661 IEM_MC_ADVANCE_RIP_AND_FINISH();
8662 IEM_MC_END();
8663 }
8664}
8665
8666
8667/** Opcode 0x0f 0x9a. */
8668FNIEMOP_DEF(iemOp_setp_Eb)
8669{
8670 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8671 IEMOP_HLP_MIN_386();
8672 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8673
8674 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8675 * any way. AMD says it's "unused", whatever that means. We're
8676 * ignoring for now. */
8677 if (IEM_IS_MODRM_REG_MODE(bRm))
8678 {
8679 /* register target */
8680 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8682 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8683 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8684 } IEM_MC_ELSE() {
8685 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8686 } IEM_MC_ENDIF();
8687 IEM_MC_ADVANCE_RIP_AND_FINISH();
8688 IEM_MC_END();
8689 }
8690 else
8691 {
8692 /* memory target */
8693 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8697 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8698 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8699 } IEM_MC_ELSE() {
8700 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8701 } IEM_MC_ENDIF();
8702 IEM_MC_ADVANCE_RIP_AND_FINISH();
8703 IEM_MC_END();
8704 }
8705}
8706
8707
8708/** Opcode 0x0f 0x9b. */
8709FNIEMOP_DEF(iemOp_setnp_Eb)
8710{
8711 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8712 IEMOP_HLP_MIN_386();
8713 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8714
8715 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8716 * any way. AMD says it's "unused", whatever that means. We're
8717 * ignoring it for now. */
8718 if (IEM_IS_MODRM_REG_MODE(bRm))
8719 {
8720 /* register target */
8721 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8723 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8724 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8725 } IEM_MC_ELSE() {
8726 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8727 } IEM_MC_ENDIF();
8728 IEM_MC_ADVANCE_RIP_AND_FINISH();
8729 IEM_MC_END();
8730 }
8731 else
8732 {
8733 /* memory target */
8734 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8738 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8739 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8740 } IEM_MC_ELSE() {
8741 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8742 } IEM_MC_ENDIF();
8743 IEM_MC_ADVANCE_RIP_AND_FINISH();
8744 IEM_MC_END();
8745 }
8746}
8747
8748
8749/** Opcode 0x0f 0x9c. */
8750FNIEMOP_DEF(iemOp_setl_Eb)
8751{
8752 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8753 IEMOP_HLP_MIN_386();
8754 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8755
8756 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8757 * any way. AMD says it's "unused", whatever that means. We're
8758 * ignoring it for now. */
8759 if (IEM_IS_MODRM_REG_MODE(bRm))
8760 {
8761 /* register target */
8762 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8764 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8765 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8766 } IEM_MC_ELSE() {
8767 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8768 } IEM_MC_ENDIF();
8769 IEM_MC_ADVANCE_RIP_AND_FINISH();
8770 IEM_MC_END();
8771 }
8772 else
8773 {
8774 /* memory target */
8775 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8779 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8780 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8781 } IEM_MC_ELSE() {
8782 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8783 } IEM_MC_ENDIF();
8784 IEM_MC_ADVANCE_RIP_AND_FINISH();
8785 IEM_MC_END();
8786 }
8787}
8788
8789
8790/** Opcode 0x0f 0x9d. */
8791FNIEMOP_DEF(iemOp_setnl_Eb)
8792{
8793 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8794 IEMOP_HLP_MIN_386();
8795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8796
8797 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8798 * any way. AMD says it's "unused", whatever that means. We're
8799 * ignoring it for now. */
8800 if (IEM_IS_MODRM_REG_MODE(bRm))
8801 {
8802 /* register target */
8803 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8805 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8806 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8807 } IEM_MC_ELSE() {
8808 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8809 } IEM_MC_ENDIF();
8810 IEM_MC_ADVANCE_RIP_AND_FINISH();
8811 IEM_MC_END();
8812 }
8813 else
8814 {
8815 /* memory target */
8816 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8820 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8821 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8822 } IEM_MC_ELSE() {
8823 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8824 } IEM_MC_ENDIF();
8825 IEM_MC_ADVANCE_RIP_AND_FINISH();
8826 IEM_MC_END();
8827 }
8828}
8829
8830
8831/** Opcode 0x0f 0x9e. */
8832FNIEMOP_DEF(iemOp_setle_Eb)
8833{
8834 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8835 IEMOP_HLP_MIN_386();
8836 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8837
8838 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8839 * any way. AMD says it's "unused", whatever that means. We're
8840 * ignoring it for now. */
8841 if (IEM_IS_MODRM_REG_MODE(bRm))
8842 {
8843 /* register target */
8844 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8846 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8847 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8848 } IEM_MC_ELSE() {
8849 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8850 } IEM_MC_ENDIF();
8851 IEM_MC_ADVANCE_RIP_AND_FINISH();
8852 IEM_MC_END();
8853 }
8854 else
8855 {
8856 /* memory target */
8857 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8859 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8861 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8862 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8863 } IEM_MC_ELSE() {
8864 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8865 } IEM_MC_ENDIF();
8866 IEM_MC_ADVANCE_RIP_AND_FINISH();
8867 IEM_MC_END();
8868 }
8869}
8870
8871
8872/** Opcode 0x0f 0x9f. */
8873FNIEMOP_DEF(iemOp_setnle_Eb)
8874{
8875 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8876 IEMOP_HLP_MIN_386();
8877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8878
8879 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8880 * any way. AMD says it's "unused", whatever that means. We're
8881 * ignoring it for now. */
8882 if (IEM_IS_MODRM_REG_MODE(bRm))
8883 {
8884 /* register target */
8885 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8887 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8888 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8889 } IEM_MC_ELSE() {
8890 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8891 } IEM_MC_ENDIF();
8892 IEM_MC_ADVANCE_RIP_AND_FINISH();
8893 IEM_MC_END();
8894 }
8895 else
8896 {
8897 /* memory target */
8898 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8902 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8903 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8904 } IEM_MC_ELSE() {
8905 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8906 } IEM_MC_ENDIF();
8907 IEM_MC_ADVANCE_RIP_AND_FINISH();
8908 IEM_MC_END();
8909 }
8910}
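
/*
 * Illustrative sketch of the composite conditions the signed SETcc variants
 * above test (local helpers, not used by the emulation; RT_BOOL is just
 * !!(expr)): SETL/SETNL check SF != OF, while SETLE/SETNLE additionally
 * fold in ZF.
 */
#if 0
static bool iemSetlCondSketch(uint32_t fEFl)  /* SETL: SF != OF */
{
    return RT_BOOL(fEFl & X86_EFL_SF) != RT_BOOL(fEFl & X86_EFL_OF);
}

static bool iemSetleCondSketch(uint32_t fEFl) /* SETLE: ZF set, or SF != OF */
{
    return (fEFl & X86_EFL_ZF) || iemSetlCondSketch(fEFl);
}
#endif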
8911
8912
8913/** Opcode 0x0f 0xa0. */
8914FNIEMOP_DEF(iemOp_push_fs)
8915{
8916 IEMOP_MNEMONIC(push_fs, "push fs");
8917 IEMOP_HLP_MIN_386();
8918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8919 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8920}
8921
8922
8923/** Opcode 0x0f 0xa1. */
8924FNIEMOP_DEF(iemOp_pop_fs)
8925{
8926 IEMOP_MNEMONIC(pop_fs, "pop fs");
8927 IEMOP_HLP_MIN_386();
8928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8929 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
8930 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8931 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8932 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8933 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS),
8934 iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8935}
8936
8937
8938/** Opcode 0x0f 0xa2. */
8939FNIEMOP_DEF(iemOp_cpuid)
8940{
8941 IEMOP_MNEMONIC(cpuid, "cpuid");
8942 IEMOP_HLP_MIN_486(); /* not all 486es. */
8943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8944 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
8945 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8946 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
8947 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
8948 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
8949 iemCImpl_cpuid);
8950}
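
/*
 * CPUID takes the leaf in EAX (and the sub-leaf in ECX) and returns its data
 * in EAX/EBX/ECX/EDX, which is why all four GPRs appear in the flush mask
 * above. Host-side usage sketch for reference only (GCC/Clang <cpuid.h>
 * builtin; not used by the emulation):
 */
#if 0
# include <cpuid.h>
static void iemCpuidSketch(void)
{
    unsigned uEax, uEbx, uEcx, uEdx;
    if (__get_cpuid(0 /* leaf */, &uEax, &uEbx, &uEcx, &uEdx))
    {   /* uEax = highest standard leaf; uEbx/uEdx/uEcx = vendor string. */
    }
}
#endif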
8951
8952
8953/**
8954 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8955 * iemOp_bts_Ev_Gv.
8956 */
8957
8958#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8959 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8960 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8961 \
8962 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8963 { \
8964 /* register destination. */ \
8965 switch (pVCpu->iem.s.enmEffOpSize) \
8966 { \
8967 case IEMMODE_16BIT: \
8968 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
8969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8970 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8971 IEM_MC_ARG(uint16_t, u16Src, 1); \
8972 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8973 \
8974 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8975 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8976 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8977 IEM_MC_REF_EFLAGS(pEFlags); \
8978 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
8979 \
8980 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8981 IEM_MC_END(); \
8982 break; \
8983 \
8984 case IEMMODE_32BIT: \
8985 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
8986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8987 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8988 IEM_MC_ARG(uint32_t, u32Src, 1); \
8989 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8990 \
8991 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8992 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8993 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8994 IEM_MC_REF_EFLAGS(pEFlags); \
8995 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
8996 \
8997 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8998 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8999 IEM_MC_END(); \
9000 break; \
9001 \
9002 case IEMMODE_64BIT: \
9003 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
9004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9005 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9006 IEM_MC_ARG(uint64_t, u64Src, 1); \
9007 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9008 \
9009 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9010 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9011 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9012 IEM_MC_REF_EFLAGS(pEFlags); \
9013 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9014 \
9015 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9016 IEM_MC_END(); \
9017 break; \
9018 \
9019 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9020 } \
9021 } \
9022 else \
9023 { \
9024 /* memory destination. */ \
9025 /** @todo test negative bit offsets! */ \
9026 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9027 { \
9028 switch (pVCpu->iem.s.enmEffOpSize) \
9029 { \
9030 case IEMMODE_16BIT: \
9031 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9032 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9034 IEMOP_HLP_DONE_DECODING(); \
9035 \
9036 IEM_MC_ARG(uint16_t, u16Src, 1); \
9037 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9038 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9039 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9040 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9041 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9042 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9043 \
9044 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9045 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9046 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9047 \
9048 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9049 IEM_MC_FETCH_EFLAGS(EFlags); \
9050 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9051 \
9052 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
9053 IEM_MC_COMMIT_EFLAGS(EFlags); \
9054 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9055 IEM_MC_END(); \
9056 break; \
9057 \
9058 case IEMMODE_32BIT: \
9059 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9061 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9062 IEMOP_HLP_DONE_DECODING(); \
9063 \
9064 IEM_MC_ARG(uint32_t, u32Src, 1); \
9065 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9066 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9067 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9068 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9069 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9070 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9071 \
9072 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9073 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9074 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9075 \
9076 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9077 IEM_MC_FETCH_EFLAGS(EFlags); \
9078 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9079 \
9080 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
9081 IEM_MC_COMMIT_EFLAGS(EFlags); \
9082 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9083 IEM_MC_END(); \
9084 break; \
9085 \
9086 case IEMMODE_64BIT: \
9087 IEM_MC_BEGIN(3, 5, IEM_MC_F_64BIT, 0); \
9088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9090 IEMOP_HLP_DONE_DECODING(); \
9091 \
9092 IEM_MC_ARG(uint64_t, u64Src, 1); \
9093 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9094 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9095 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9096 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9097 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9098 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9099 \
9100 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9101 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9102 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9103 \
9104 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9105 IEM_MC_FETCH_EFLAGS(EFlags); \
9106 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9107 \
9108 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
9109 IEM_MC_COMMIT_EFLAGS(EFlags); \
9110 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9111 IEM_MC_END(); \
9112 break; \
9113 \
9114 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9115 } \
9116 } \
9117 else \
9118 { \
9119 (void)0
9120/* Separate macro to work around parsing issue in IEMAllInstPython.py */
9121#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9122 switch (pVCpu->iem.s.enmEffOpSize) \
9123 { \
9124 case IEMMODE_16BIT: \
9125 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9128 IEMOP_HLP_DONE_DECODING(); \
9129 \
9130 IEM_MC_ARG(uint16_t, u16Src, 1); \
9131 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9132 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9133 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9134 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9135 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9136 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9137 \
9138 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9139 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9140 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9141 \
9142 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9143 IEM_MC_FETCH_EFLAGS(EFlags); \
9144 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
9145 \
9146 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
9147 IEM_MC_COMMIT_EFLAGS(EFlags); \
9148 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9149 IEM_MC_END(); \
9150 break; \
9151 \
9152 case IEMMODE_32BIT: \
9153 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9154 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9156 IEMOP_HLP_DONE_DECODING(); \
9157 \
9158 IEM_MC_ARG(uint32_t, u32Src, 1); \
9159 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9160 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9161 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9162 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9163 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9164 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9165 \
9166 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9167 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9168 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9169 \
9170 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9171 IEM_MC_FETCH_EFLAGS(EFlags); \
9172 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
9173 \
9174 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
9175 IEM_MC_COMMIT_EFLAGS(EFlags); \
9176 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9177 IEM_MC_END(); \
9178 break; \
9179 \
9180 case IEMMODE_64BIT: \
9181 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
9182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9184 IEMOP_HLP_DONE_DECODING(); \
9185 \
9186 IEM_MC_ARG(uint64_t, u64Src, 1); \
9187 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9188 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9189 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9190 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9191 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9192 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9193 \
9194 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9195 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9196 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9197 \
9198 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9199 IEM_MC_FETCH_EFLAGS(EFlags); \
9200 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
9201 \
9202 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
9203 IEM_MC_COMMIT_EFLAGS(EFlags); \
9204 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9205 IEM_MC_END(); \
9206 break; \
9207 \
9208 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9209 } \
9210 } \
9211 } \
9212 (void)0
9213
9214/* Read-only version (bt). */
9215#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9216 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9217 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9218 \
9219 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9220 { \
9221 /* register destination. */ \
9222 switch (pVCpu->iem.s.enmEffOpSize) \
9223 { \
9224 case IEMMODE_16BIT: \
9225 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9227 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9228 IEM_MC_ARG(uint16_t, u16Src, 1); \
9229 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9230 \
9231 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9232 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9233 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9234 IEM_MC_REF_EFLAGS(pEFlags); \
9235 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9236 \
9237 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9238 IEM_MC_END(); \
9239 break; \
9240 \
9241 case IEMMODE_32BIT: \
9242 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9244 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9245 IEM_MC_ARG(uint32_t, u32Src, 1); \
9246 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9247 \
9248 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9249 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9250 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9251 IEM_MC_REF_EFLAGS(pEFlags); \
9252 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9253 \
9254 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9255 IEM_MC_END(); \
9256 break; \
9257 \
9258 case IEMMODE_64BIT: \
9259 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
9260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9261 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9262 IEM_MC_ARG(uint64_t, u64Src, 1); \
9263 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9264 \
9265 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9266 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9267 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9268 IEM_MC_REF_EFLAGS(pEFlags); \
9269 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9270 \
9271 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9272 IEM_MC_END(); \
9273 break; \
9274 \
9275 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9276 } \
9277 } \
9278 else \
9279 { \
9280 /* memory destination. */ \
9281 /** @todo test negative bit offsets! */ \
9282 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9283 { \
9284 switch (pVCpu->iem.s.enmEffOpSize) \
9285 { \
9286 case IEMMODE_16BIT: \
9287 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9290 IEMOP_HLP_DONE_DECODING(); \
9291 \
9292 IEM_MC_ARG(uint16_t, u16Src, 1); \
9293 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9294 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9295 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9296 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9297 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9298 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9299 \
9300 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9301 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9302 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9303 \
9304 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9305 IEM_MC_FETCH_EFLAGS(EFlags); \
9306 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9307 \
9308 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
9309 IEM_MC_COMMIT_EFLAGS(EFlags); \
9310 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9311 IEM_MC_END(); \
9312 break; \
9313 \
9314 case IEMMODE_32BIT: \
9315 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9317 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9318 IEMOP_HLP_DONE_DECODING(); \
9319 \
9320 IEM_MC_ARG(uint32_t, u32Src, 1); \
9321 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9322 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9323 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9324 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9325 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9326 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9327 \
9328 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9329 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9330 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9331 \
9332 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9333 IEM_MC_FETCH_EFLAGS(EFlags); \
9334 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9335 \
9336 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
9337 IEM_MC_COMMIT_EFLAGS(EFlags); \
9338 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9339 IEM_MC_END(); \
9340 break; \
9341 \
9342 case IEMMODE_64BIT: \
9343 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
9344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9346 IEMOP_HLP_DONE_DECODING(); \
9347 \
9348 IEM_MC_ARG(uint64_t, u64Src, 1); \
9349 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9350 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9351 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9352 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9353 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9354 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9355 \
9356 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9357 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9358 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9359 \
9360 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9361 IEM_MC_FETCH_EFLAGS(EFlags); \
9362 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9363 \
9364 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
9365 IEM_MC_COMMIT_EFLAGS(EFlags); \
9366 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9367 IEM_MC_END(); \
9368 break; \
9369 \
9370 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9371 } \
9372 } \
9373 else \
9374 { \
9375 IEMOP_HLP_DONE_DECODING(); \
9376 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9377 } \
9378 } \
9379 (void)0
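
/*
 * The effective address adjustment in the three bodies above is easier to
 * follow in plain C; an illustrative 16-bit version (local names, not part
 * of the emulation): the bit offset taken from the source register is
 * treated as *signed*, the word containing the bit is located by an
 * arithmetic shift right by 4 scaled up to bytes, and only the low four
 * bits select the bit inside that word. The same pattern is used with
 * shift counts of 5/2 for 32-bit and 6/3 for 64-bit operands.
 */
#if 0
static void iemBtAddrAdjSketch16(uint16_t u16BitOffset, uint64_t *puGCPtrEff, uint16_t *pu16BitNo)
{
    int16_t i16AddrAdj = (int16_t)u16BitOffset; /* signed bit offset */
    *pu16BitNo  = u16BitOffset & 0x0f;          /* bit number within the word */
    i16AddrAdj >>= 4;                           /* word index; arithmetic shift keeps the sign */
    i16AddrAdj <<= 1;                           /* scale the word index to bytes */
    *puGCPtrEff += i16AddrAdj;                  /* may move the address backwards for negative offsets */
}
#endif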
9380
9381
9382/** Opcode 0x0f 0xa3. */
9383FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9384{
9385 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9386 IEMOP_HLP_MIN_386();
9387 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9388}
9389
9390
9391/**
9392 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9393 */
9394FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9395{
9396 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9397 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9398
9399 if (IEM_IS_MODRM_REG_MODE(bRm))
9400 {
9401 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9402
9403 switch (pVCpu->iem.s.enmEffOpSize)
9404 {
9405 case IEMMODE_16BIT:
9406 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9408 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9409 IEM_MC_ARG(uint16_t, u16Src, 1);
9410 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9411 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9412
9413 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9414 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9415 IEM_MC_REF_EFLAGS(pEFlags);
9416 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9417
9418 IEM_MC_ADVANCE_RIP_AND_FINISH();
9419 IEM_MC_END();
9420 break;
9421
9422 case IEMMODE_32BIT:
9423 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9425 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9426 IEM_MC_ARG(uint32_t, u32Src, 1);
9427 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9428 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9429
9430 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9431 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9432 IEM_MC_REF_EFLAGS(pEFlags);
9433 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9434
9435 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
9436 IEM_MC_ADVANCE_RIP_AND_FINISH();
9437 IEM_MC_END();
9438 break;
9439
9440 case IEMMODE_64BIT:
9441 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
9442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9443 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9444 IEM_MC_ARG(uint64_t, u64Src, 1);
9445 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9446 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9447
9448 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9449 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9450 IEM_MC_REF_EFLAGS(pEFlags);
9451 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9452
9453 IEM_MC_ADVANCE_RIP_AND_FINISH();
9454 IEM_MC_END();
9455 break;
9456
9457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9458 }
9459 }
9460 else
9461 {
9462 switch (pVCpu->iem.s.enmEffOpSize)
9463 {
9464 case IEMMODE_16BIT:
9465 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9466 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9468
9469 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9471
9472 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9473 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9474 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9475
9476 IEM_MC_ARG(uint16_t, u16Src, 1);
9477 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9478 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2);
9479 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9480 IEM_MC_FETCH_EFLAGS(EFlags);
9481 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9482
9483 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
9484 IEM_MC_COMMIT_EFLAGS(EFlags);
9485 IEM_MC_ADVANCE_RIP_AND_FINISH();
9486 IEM_MC_END();
9487 break;
9488
9489 case IEMMODE_32BIT:
9490 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9493
9494 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9496
9497 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9498 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9499 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9500
9501 IEM_MC_ARG(uint32_t, u32Src, 1);
9502 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9503 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2);
9504 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9505 IEM_MC_FETCH_EFLAGS(EFlags);
9506 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9507
9508 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
9509 IEM_MC_COMMIT_EFLAGS(EFlags);
9510 IEM_MC_ADVANCE_RIP_AND_FINISH();
9511 IEM_MC_END();
9512 break;
9513
9514 case IEMMODE_64BIT:
9515 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0);
9516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9518
9519 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9521
9522 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9523 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9524 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9525
9526 IEM_MC_ARG(uint64_t, u64Src, 1);
9527 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9528 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2);
9529 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9530 IEM_MC_FETCH_EFLAGS(EFlags);
9531
9532 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9533
9534 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
9535 IEM_MC_COMMIT_EFLAGS(EFlags);
9536 IEM_MC_ADVANCE_RIP_AND_FINISH();
9537 IEM_MC_END();
9538 break;
9539
9540 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9541 }
9542 }
9543}
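
/*
 * Illustrative sketch of the double precision shift the pfnNormalUxx
 * workers implement (local names; the real workers also update EFLAGS):
 * SHLD shifts the destination left and fills the vacated low bits from the
 * top of the source, SHRD is the mirror image. The count is masked to the
 * operand width as usual (mod 32 here, mod 64 for 64-bit operands), which
 * also covers the CL-count variant below.
 */
#if 0
static uint32_t iemShldU32Sketch(uint32_t u32Dst, uint32_t u32Src, uint8_t cShift)
{
    cShift &= 31;
    if (!cShift)
        return u32Dst; /* count 0: nothing happens, flags untouched */
    return (u32Dst << cShift) | (u32Src >> (32 - cShift));
}
#endif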
9544
9545
9546/**
9547 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9548 */
9549FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9550{
9551 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9552 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9553
9554 if (IEM_IS_MODRM_REG_MODE(bRm))
9555 {
9556 switch (pVCpu->iem.s.enmEffOpSize)
9557 {
9558 case IEMMODE_16BIT:
9559 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9561 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9562 IEM_MC_ARG(uint16_t, u16Src, 1);
9563 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9564 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9565
9566 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9567 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9568 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9569 IEM_MC_REF_EFLAGS(pEFlags);
9570 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9571
9572 IEM_MC_ADVANCE_RIP_AND_FINISH();
9573 IEM_MC_END();
9574 break;
9575
9576 case IEMMODE_32BIT:
9577 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9579 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9580 IEM_MC_ARG(uint32_t, u32Src, 1);
9581 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9582 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9583
9584 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9585 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9586 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9587 IEM_MC_REF_EFLAGS(pEFlags);
9588 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9589
9590 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
9591 IEM_MC_ADVANCE_RIP_AND_FINISH();
9592 IEM_MC_END();
9593 break;
9594
9595 case IEMMODE_64BIT:
9596 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
9597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9598 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9599 IEM_MC_ARG(uint64_t, u64Src, 1);
9600 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9601 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9602
9603 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9604 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9605 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9606 IEM_MC_REF_EFLAGS(pEFlags);
9607 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9608
9609 IEM_MC_ADVANCE_RIP_AND_FINISH();
9610 IEM_MC_END();
9611 break;
9612
9613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9614 }
9615 }
9616 else
9617 {
9618 switch (pVCpu->iem.s.enmEffOpSize)
9619 {
9620 case IEMMODE_16BIT:
9621 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9622 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9623 IEM_MC_ARG(uint16_t, u16Src, 1);
9624 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9625 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9626 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9627 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9628
9629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9631 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9632 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9633 IEM_MC_FETCH_EFLAGS(EFlags);
9634 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9635 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9636
9637 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
9638 IEM_MC_COMMIT_EFLAGS(EFlags);
9639 IEM_MC_ADVANCE_RIP_AND_FINISH();
9640 IEM_MC_END();
9641 break;
9642
9643 case IEMMODE_32BIT:
9644 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9645 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9646 IEM_MC_ARG(uint32_t, u32Src, 1);
9647 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9648 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9650 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9651
9652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9654 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9655 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9656 IEM_MC_FETCH_EFLAGS(EFlags);
9657 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9658 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9659
9660 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
9661 IEM_MC_COMMIT_EFLAGS(EFlags);
9662 IEM_MC_ADVANCE_RIP_AND_FINISH();
9663 IEM_MC_END();
9664 break;
9665
9666 case IEMMODE_64BIT:
9667 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0);
9668 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9669 IEM_MC_ARG(uint64_t, u64Src, 1);
9670 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9671 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9673 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9674
9675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9677 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9678 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9679 IEM_MC_FETCH_EFLAGS(EFlags);
9680 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9681 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9682
9683 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
9684 IEM_MC_COMMIT_EFLAGS(EFlags);
9685 IEM_MC_ADVANCE_RIP_AND_FINISH();
9686 IEM_MC_END();
9687 break;
9688
9689 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9690 }
9691 }
9692}
9693
9694
9695
9696/** Opcode 0x0f 0xa4. */
9697FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9698{
9699 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9700 IEMOP_HLP_MIN_386();
9701 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9702}
9703
9704
9705/** Opcode 0x0f 0xa5. */
9706FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9707{
9708 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9709 IEMOP_HLP_MIN_386();
9710 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9711}
9712
9713
9714/** Opcode 0x0f 0xa8. */
9715FNIEMOP_DEF(iemOp_push_gs)
9716{
9717 IEMOP_MNEMONIC(push_gs, "push gs");
9718 IEMOP_HLP_MIN_386();
9719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9720 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9721}
9722
9723
9724/** Opcode 0x0f 0xa9. */
9725FNIEMOP_DEF(iemOp_pop_gs)
9726{
9727 IEMOP_MNEMONIC(pop_gs, "pop gs");
9728 IEMOP_HLP_MIN_386();
9729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9730 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
9731 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9732 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9733 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9734 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
9735 iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9736}
9737
9738
9739/** Opcode 0x0f 0xaa. */
9740FNIEMOP_DEF(iemOp_rsm)
9741{
9742 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9743 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9745 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9746 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
9747 iemCImpl_rsm);
9748}
9749
9750
9751
9752/** Opcode 0x0f 0xab. */
9753FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9754{
9755 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9756 IEMOP_HLP_MIN_386();
9757 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9758 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9759}
9760
9761
9762/** Opcode 0x0f 0xac. */
9763FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9764{
9765 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9766 IEMOP_HLP_MIN_386();
9767 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9768}
9769
9770
9771/** Opcode 0x0f 0xad. */
9772FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9773{
9774 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9775 IEMOP_HLP_MIN_386();
9776 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9777}
9778
9779
9780/** Opcode 0x0f 0xae mem/0. */
9781FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9782{
9783 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9784 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9785 IEMOP_RAISE_INVALID_OPCODE_RET();
9786
9787 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II, 0);
9788 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9791 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9792 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9793 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9794 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9795 IEM_MC_END();
9796}
9797
9798
9799/** Opcode 0x0f 0xae mem/1. */
9800FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9801{
9802 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9803 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9804 IEMOP_RAISE_INVALID_OPCODE_RET();
9805
9806 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II, 0);
9807 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9810 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9811 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9812 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9813 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9814 IEM_MC_END();
9815}
9816
9817
9818/**
9819 * @opmaps grp15
9820 * @opcode !11/2
9821 * @oppfx none
9822 * @opcpuid sse
9823 * @opgroup og_sse_mxcsrsm
9824 * @opxcpttype 5
9825 * @optest op1=0 -> mxcsr=0
9826 * @optest op1=0x2083 -> mxcsr=0x2083
9827 * @optest op1=0xfffffffe -> value.xcpt=0xd
9828 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9829 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9830 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9831 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9832 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9833 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9834 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9835 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9836 */
9837FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9838{
9839 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9840 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9841 IEMOP_RAISE_INVALID_OPCODE_RET();
9842
9843 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II, 0);
9844 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9847 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9848 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9849 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9850 IEM_MC_END();
9851}
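
/*
 * Sketch of the reserved-bit check behind iemCImpl_ldmxcsr (illustrative;
 * fMxCsrMask stands in for the CPU's MXCSR_MASK as reported by FXSAVE):
 * attempting to set any reserved MXCSR bit raises #GP(0), which is what the
 * op1=0xfffffffe test above expects.
 */
#if 0
static bool iemLdmxcsrWouldGpSketch(uint32_t uNewMxCsr, uint32_t fMxCsrMask)
{
    return (uNewMxCsr & ~fMxCsrMask) != 0; /* true -> raise #GP(0) */
}
#endif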
9852
9853
9854/**
9855 * @opmaps grp15
9856 * @opcode !11/3
9857 * @oppfx none
9858 * @opcpuid sse
9859 * @opgroup og_sse_mxcsrsm
9860 * @opxcpttype 5
9861 * @optest mxcsr=0 -> op1=0
9862 * @optest mxcsr=0x2083 -> op1=0x2083
9863 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9864 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9865 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9866 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9867 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9868 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9869 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9870 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9871 */
9872FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9873{
9874 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9875 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9876 IEMOP_RAISE_INVALID_OPCODE_RET();
9877
9878 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II, 0);
9879 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9882 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9883 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9884 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9885 IEM_MC_END();
9886}
9887
9888
9889/**
9890 * @opmaps grp15
9891 * @opcode !11/4
9892 * @oppfx none
9893 * @opcpuid xsave
9894 * @opgroup og_system
9895 * @opxcpttype none
9896 */
9897FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9898{
9899 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9900 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9901 IEMOP_RAISE_INVALID_OPCODE_RET();
9902
9903 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE, 0);
9904 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9907 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9908 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9909 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9910 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9911 IEM_MC_END();
9912}
9913
9914
9915/**
9916 * @opmaps grp15
9917 * @opcode !11/5
9918 * @oppfx none
9919 * @opcpuid xsave
9920 * @opgroup og_system
9921 * @opxcpttype none
9922 */
9923FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9924{
9925 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9926 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9927 IEMOP_RAISE_INVALID_OPCODE_RET();
9928
9929 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE, 0);
9930 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9931 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9933 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9934 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9935 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9936 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9937 IEM_MC_END();
9938}
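
/*
 * For both XSAVE and XRSTOR the set of state components actually processed
 * is the requested-feature bitmap EDX:EAX masked by XCR0; illustrative
 * sketch (local names; the real logic lives in iemCImpl_xsave/xrstor):
 */
#if 0
static uint64_t iemXsaveRfbmSketch(uint32_t uEax, uint32_t uEdx, uint64_t uXcr0)
{
    return (((uint64_t)uEdx << 32) | uEax) & uXcr0;
}
#endif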
9939
9940/** Opcode 0x0f 0xae mem/6. */
9941FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9942
9943/**
9944 * @opmaps grp15
9945 * @opcode !11/7
9946 * @oppfx none
9947 * @opcpuid clfsh
9948 * @opgroup og_cachectl
9949 * @optest op1=1 ->
9950 */
9951FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9952{
9953 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9954 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9955 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9956
9957 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
9958 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9961 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9962 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9963 IEM_MC_END();
9964}
9965
9966/**
9967 * @opmaps grp15
9968 * @opcode !11/7
9969 * @oppfx 0x66
9970 * @opcpuid clflushopt
9971 * @opgroup og_cachectl
9972 * @optest op1=1 ->
9973 */
9974FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9975{
9976 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9977 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9978 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9979
9980 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
9981 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9984 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9985 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9986 IEM_MC_END();
9987}
9988
9989
9990/** Opcode 0x0f 0xae 11b/5. */
9991FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9992{
9993 RT_NOREF_PV(bRm);
9994 IEMOP_MNEMONIC(lfence, "lfence");
9995 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
9996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9997#ifdef RT_ARCH_ARM64
9998 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9999#else
10000 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
10001 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
10002 else
10003 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
10004#endif
10005 IEM_MC_ADVANCE_RIP_AND_FINISH();
10006 IEM_MC_END();
10007}
10008
10009
10010/** Opcode 0x0f 0xae 11b/6. */
10011FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
10012{
10013 RT_NOREF_PV(bRm);
10014 IEMOP_MNEMONIC(mfence, "mfence");
10015 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
10016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
10017#ifdef RT_ARCH_ARM64
10018 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
10019#else
10020 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
10021 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
10022 else
10023 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
10024#endif
10025 IEM_MC_ADVANCE_RIP_AND_FINISH();
10026 IEM_MC_END();
10027}
10028
10029
10030/** Opcode 0x0f 0xae 11b/7. */
10031FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
10032{
10033 RT_NOREF_PV(bRm);
10034 IEMOP_MNEMONIC(sfence, "sfence");
10035 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
10036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
10037#ifdef RT_ARCH_ARM64
10038 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
10039#else
10040 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
10041 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
10042 else
10043 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
10044#endif
10045 IEM_MC_ADVANCE_RIP_AND_FINISH();
10046 IEM_MC_END();
10047}
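
/*
 * On pre-SSE2 hosts the three fence workers above fall back to
 * iemAImpl_alt_mem_fence. The classic way to get a full memory fence
 * without SSE2 is a LOCK-prefixed read-modify-write; purely illustrative
 * sketch (GCC/Clang inline assembly, x86 hosts only):
 */
#if 0
static void iemAltMemFenceSketch(void)
{
    int32_t volatile iTmp = 0;
    __asm__ __volatile__("lock; addl $0, %0" : "+m" (iTmp) : : "memory");
}
#endif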
10048
10049
10050/** Opcode 0xf3 0x0f 0xae 11b/0. */
10051FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
10052{
10053 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
10054 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10055 {
10056 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10058 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10059 IEM_MC_LOCAL(uint64_t, u64Dst);
10060 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
10061 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10062 IEM_MC_ADVANCE_RIP_AND_FINISH();
10063 IEM_MC_END();
10064 }
10065 else
10066 {
10067 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10069 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10070 IEM_MC_LOCAL(uint32_t, u32Dst);
10071 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
10072 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10073 IEM_MC_ADVANCE_RIP_AND_FINISH();
10074 IEM_MC_END();
10075 }
10076}
10077
10078
10079/** Opcode 0xf3 0x0f 0xae 11b/1. */
10080FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
10081{
10082 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
10083 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10084 {
10085 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10087 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10088 IEM_MC_LOCAL(uint64_t, u64Dst);
10089 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
10090 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10091 IEM_MC_ADVANCE_RIP_AND_FINISH();
10092 IEM_MC_END();
10093 }
10094 else
10095 {
10096 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10098 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10099 IEM_MC_LOCAL(uint32_t, u32Dst);
10100 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
10101 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10102 IEM_MC_ADVANCE_RIP_AND_FINISH();
10103 IEM_MC_END();
10104 }
10105}
10106
10107
10108/** Opcode 0xf3 0x0f 0xae 11b/2. */
10109FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10110{
10111 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
10112 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10113 {
10114 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10116 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10117 IEM_MC_LOCAL(uint64_t, u64Dst);
10118 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10119 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10120 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10121 IEM_MC_ADVANCE_RIP_AND_FINISH();
10122 IEM_MC_END();
10123 }
10124 else
10125 {
10126 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10128 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10129 IEM_MC_LOCAL(uint32_t, u32Dst);
10130 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10131 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10132 IEM_MC_ADVANCE_RIP_AND_FINISH();
10133 IEM_MC_END();
10134 }
10135}
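
/*
 * Sketch of the canonical-address test behind
 * IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0 (illustrative only): with
 * 48-bit linear addressing a base address is canonical when bits 63:48 are
 * copies of bit 47, i.e. when the value sign-extends from bit 47; WRFSBASE
 * and WRGSBASE raise #GP(0) for anything else.
 */
#if 0
static bool iemIsCanonicalSketch(uint64_t uAddr)
{
    return (uint64_t)((int64_t)(uAddr << 16) >> 16) == uAddr;
}
#endif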
10136
10137
10138/** Opcode 0xf3 0x0f 0xae 11b/3. */
10139FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10140{
10141 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10142 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10143 {
10144 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10146 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10147 IEM_MC_LOCAL(uint64_t, u64Dst);
10148 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10149 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10150 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10151 IEM_MC_ADVANCE_RIP_AND_FINISH();
10152 IEM_MC_END();
10153 }
10154 else
10155 {
10156 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10158 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10159 IEM_MC_LOCAL(uint32_t, u32Dst);
10160 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10161 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10162 IEM_MC_ADVANCE_RIP_AND_FINISH();
10163 IEM_MC_END();
10164 }
10165}
10166
10167
10168/**
10169 * Group 15 jump table for register variant.
10170 */
10171IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10172{ /* pfx: none, 066h, 0f3h, 0f2h */
10173 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10174 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10175 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10176 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10177 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10178 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10179 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10180 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10181};
10182AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10183
10184
10185/**
10186 * Group 15 jump table for memory variant.
10187 */
10188IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10189{ /* pfx: none, 066h, 0f3h, 0f2h */
10190 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10191 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10192 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10193 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10194 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10195 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10196 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10197 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10198};
10199AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10200
10201
10202/** Opcode 0x0f 0xae. */
10203FNIEMOP_DEF(iemOp_Grp15)
10204{
10205 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
10206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10207 if (IEM_IS_MODRM_REG_MODE(bRm))
10208 /* register, register */
10209 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10210 + pVCpu->iem.s.idxPrefix], bRm);
10211 /* memory, register */
10212 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10213 + pVCpu->iem.s.idxPrefix], bRm);
10214}
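
/*
 * The dispatch above indexes the two tables as reg * 4 + prefix column,
 * with the columns ordered none/0x66/0xf3/0xf2 exactly as in the table
 * comments. Illustrative index calculation (local names):
 */
#if 0
static unsigned iemGrp15IndexSketch(uint8_t bRm, unsigned idxPrefix /* 0=none, 1=0x66, 2=0xf3, 3=0xf2 */)
{
    return ((bRm >> 3) & 7) * 4 + idxPrefix; /* (bRm >> 3) & 7 is what IEM_GET_MODRM_REG_8 extracts */
}
#endif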
10215
10216
10217/** Opcode 0x0f 0xaf. */
10218FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10219{
10220 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10221 IEMOP_HLP_MIN_386();
10222 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10223 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10224 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_MIN_386);
10225}
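
/*
 * Illustrative sketch of the two-operand IMUL rule the
 * g_iemAImpl_imul_two_eflags workers implement (local names): the full
 * signed product is truncated to the operand size, and CF/OF are set when
 * the truncation lost information; SF/ZF/AF/PF are undefined, as the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS note above states.
 */
#if 0
static uint32_t iemImulU32Sketch(uint32_t *pu32Dst, uint32_t u32Src)
{
    int64_t const iFull = (int64_t)(int32_t)*pu32Dst * (int32_t)u32Src;
    *pu32Dst = (uint32_t)iFull;
    return iFull != (int32_t)iFull ? X86_EFL_CF | X86_EFL_OF : 0; /* the CF+OF contribution */
}
#endif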
10226
10227
10228/** Opcode 0x0f 0xb0. */
10229FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10230{
10231 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10232 IEMOP_HLP_MIN_486();
10233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10234
10235 if (IEM_IS_MODRM_REG_MODE(bRm))
10236 {
10237 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10238 IEMOP_HLP_DONE_DECODING();
10239 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10240 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10241 IEM_MC_ARG(uint8_t, u8Src, 2);
10242 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10243
10244 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10245 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10246 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10247 IEM_MC_REF_EFLAGS(pEFlags);
10248 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10249 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10250 else
10251 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10252
10253 IEM_MC_ADVANCE_RIP_AND_FINISH();
10254 IEM_MC_END();
10255 }
10256 else
10257 {
10258 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0);
10259 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10260 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10261 IEM_MC_ARG(uint8_t, u8Src, 2);
10262 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10264 IEM_MC_LOCAL(uint8_t, u8Al);
10265 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10266
10267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10268 IEMOP_HLP_DONE_DECODING();
10269 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10270 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10271 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
10272 IEM_MC_FETCH_EFLAGS(EFlags);
10273 IEM_MC_REF_LOCAL(pu8Al, u8Al);
10274 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10275 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10276 else
10277 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10278
10279 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
10280 IEM_MC_COMMIT_EFLAGS(EFlags);
10281 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
10282 IEM_MC_ADVANCE_RIP_AND_FINISH();
10283 IEM_MC_END();
10284 }
10285}
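/* For reference, a minimal sketch (illustrative only, not the real assembly
 * worker) of the semantics iemAImpl_cmpxchg_u8 provides:
 *
 *     if (*puAl == *puDst)
 *     {   // equal: ZF=1 and the destination receives the source operand.
 *         *puDst = uSrc;
 *     }
 *     else
 *     {   // not equal: ZF=0 and the accumulator receives the destination.
 *         *puAl = *puDst;
 *     }
 *     // The remaining arithmetic flags are set as for CMP acc,dst.
 */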
10286
10287/** Opcode 0x0f 0xb1. */
10288FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10289{
10290 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10291 IEMOP_HLP_MIN_486();
10292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10293
10294 if (IEM_IS_MODRM_REG_MODE(bRm))
10295 {
10296 switch (pVCpu->iem.s.enmEffOpSize)
10297 {
10298 case IEMMODE_16BIT:
10299 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10300 IEMOP_HLP_DONE_DECODING();
10301 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10302 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10303 IEM_MC_ARG(uint16_t, u16Src, 2);
10304 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10305
10306 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10307 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10308 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10309 IEM_MC_REF_EFLAGS(pEFlags);
10310 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10311 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10312 else
10313 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10314
10315 IEM_MC_ADVANCE_RIP_AND_FINISH();
10316 IEM_MC_END();
10317 break;
10318
10319 case IEMMODE_32BIT:
10320 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10321 IEMOP_HLP_DONE_DECODING();
10322 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10323 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10324 IEM_MC_ARG(uint32_t, u32Src, 2);
10325 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10326
10327 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10328 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10329 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10330 IEM_MC_REF_EFLAGS(pEFlags);
10331 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10332 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10333 else
10334 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10335
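 /* 32-bit GPR writes zero-extend, so clear the high dword of whichever
    register the instruction actually wrote: the destination when ZF=1,
    (R/E)AX otherwise. */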
10336 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10337 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10338 } IEM_MC_ELSE() {
10339 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10340 } IEM_MC_ENDIF();
10341
10342 IEM_MC_ADVANCE_RIP_AND_FINISH();
10343 IEM_MC_END();
10344 break;
10345
10346 case IEMMODE_64BIT:
10347 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
10348 IEMOP_HLP_DONE_DECODING();
10349 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10350 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10351#ifdef RT_ARCH_X86
10352 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10353#else
10354 IEM_MC_ARG(uint64_t, u64Src, 2);
10355#endif
10356 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10357
10358 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10359 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10360 IEM_MC_REF_EFLAGS(pEFlags);
10361#ifdef RT_ARCH_X86
10362 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10363 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10364 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10365 else
10366 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10367#else
10368 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10369 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10370 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10371 else
10372 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10373#endif
10374
10375 IEM_MC_ADVANCE_RIP_AND_FINISH();
10376 IEM_MC_END();
10377 break;
10378
10379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10380 }
10381 }
10382 else
10383 {
10384 switch (pVCpu->iem.s.enmEffOpSize)
10385 {
10386 case IEMMODE_16BIT:
10387 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0);
10388 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10389 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10390 IEM_MC_ARG(uint16_t, u16Src, 2);
10391 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10393 IEM_MC_LOCAL(uint16_t, u16Ax);
10394 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10395
10396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10397 IEMOP_HLP_DONE_DECODING();
10398 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10399 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10400 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
10401 IEM_MC_FETCH_EFLAGS(EFlags);
10402 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
10403 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10404 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10405 else
10406 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10407
10408 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
10409 IEM_MC_COMMIT_EFLAGS(EFlags);
10410 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
10411 IEM_MC_ADVANCE_RIP_AND_FINISH();
10412 IEM_MC_END();
10413 break;
10414
10415 case IEMMODE_32BIT:
10416 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0);
10417 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10418 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10419 IEM_MC_ARG(uint32_t, u32Src, 2);
10420 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10422 IEM_MC_LOCAL(uint32_t, u32Eax);
10423 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10424
10425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10426 IEMOP_HLP_DONE_DECODING();
10427 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10428 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10429 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
10430 IEM_MC_FETCH_EFLAGS(EFlags);
10431 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
10432 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10433 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10434 else
10435 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10436
10437 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
10438 IEM_MC_COMMIT_EFLAGS(EFlags);
10439
10440 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10441 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
10442 } IEM_MC_ENDIF();
10443
10444 IEM_MC_ADVANCE_RIP_AND_FINISH();
10445 IEM_MC_END();
10446 break;
10447
10448 case IEMMODE_64BIT:
10449 IEM_MC_BEGIN(4, 4, IEM_MC_F_64BIT, 0);
10450 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10451 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10452#ifdef RT_ARCH_X86
10453 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10454#else
10455 IEM_MC_ARG(uint64_t, u64Src, 2);
10456#endif
10457 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10459 IEM_MC_LOCAL(uint64_t, u64Rax);
10460 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10461
10462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10463 IEMOP_HLP_DONE_DECODING();
10464 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10465 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
10466 IEM_MC_FETCH_EFLAGS(EFlags);
10467 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
10468#ifdef RT_ARCH_X86
10469 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10470 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10471 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10472 else
10473 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10474#else
10475 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10476 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10477 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10478 else
10479 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10480#endif
10481
10482 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
10483 IEM_MC_COMMIT_EFLAGS(EFlags);
10484 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
10485 IEM_MC_ADVANCE_RIP_AND_FINISH();
10486 IEM_MC_END();
10487 break;
10488
10489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10490 }
10491 }
10492}
10493
10494
10495/** Opcode 0x0f 0xb2. */
10496FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10497{
10498 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10499 IEMOP_HLP_MIN_386();
10500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10501 if (IEM_IS_MODRM_REG_MODE(bRm))
10502 IEMOP_RAISE_INVALID_OPCODE_RET();
10503 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10504}
10505
10506
10507/** Opcode 0x0f 0xb3. */
10508FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10509{
10510 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10511 IEMOP_HLP_MIN_386();
10512 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10513 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10514}
10515
10516
10517/** Opcode 0x0f 0xb4. */
10518FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10519{
10520 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10521 IEMOP_HLP_MIN_386();
10522 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10523 if (IEM_IS_MODRM_REG_MODE(bRm))
10524 IEMOP_RAISE_INVALID_OPCODE_RET();
10525 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10526}
10527
10528
10529/** Opcode 0x0f 0xb5. */
10530FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10531{
10532 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10533 IEMOP_HLP_MIN_386();
10534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10535 if (IEM_IS_MODRM_REG_MODE(bRm))
10536 IEMOP_RAISE_INVALID_OPCODE_RET();
10537 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10538}
10539
10540
10541/** Opcode 0x0f 0xb6. */
10542FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10543{
10544 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10545 IEMOP_HLP_MIN_386();
10546
10547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10548
10549 /*
10550 * If rm is denoting a register, no more instruction bytes.
10551 */
10552 if (IEM_IS_MODRM_REG_MODE(bRm))
10553 {
10554 switch (pVCpu->iem.s.enmEffOpSize)
10555 {
10556 case IEMMODE_16BIT:
10557 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10559 IEM_MC_LOCAL(uint16_t, u16Value);
10560 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10561 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10562 IEM_MC_ADVANCE_RIP_AND_FINISH();
10563 IEM_MC_END();
10564 break;
10565
10566 case IEMMODE_32BIT:
10567 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10569 IEM_MC_LOCAL(uint32_t, u32Value);
10570 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10571 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10572 IEM_MC_ADVANCE_RIP_AND_FINISH();
10573 IEM_MC_END();
10574 break;
10575
10576 case IEMMODE_64BIT:
10577 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10579 IEM_MC_LOCAL(uint64_t, u64Value);
10580 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10581 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10582 IEM_MC_ADVANCE_RIP_AND_FINISH();
10583 IEM_MC_END();
10584 break;
10585
10586 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10587 }
10588 }
10589 else
10590 {
10591 /*
10592 * We're loading a register from memory.
10593 */
10594 switch (pVCpu->iem.s.enmEffOpSize)
10595 {
10596 case IEMMODE_16BIT:
10597 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10598 IEM_MC_LOCAL(uint16_t, u16Value);
10599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10602 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10603 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10604 IEM_MC_ADVANCE_RIP_AND_FINISH();
10605 IEM_MC_END();
10606 break;
10607
10608 case IEMMODE_32BIT:
10609 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10610 IEM_MC_LOCAL(uint32_t, u32Value);
10611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10614 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10615 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10616 IEM_MC_ADVANCE_RIP_AND_FINISH();
10617 IEM_MC_END();
10618 break;
10619
10620 case IEMMODE_64BIT:
10621 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
10622 IEM_MC_LOCAL(uint64_t, u64Value);
10623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10626 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10627 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10628 IEM_MC_ADVANCE_RIP_AND_FINISH();
10629 IEM_MC_END();
10630 break;
10631
10632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10633 }
10634 }
10635}
10636
10637
10638/** Opcode 0x0f 0xb7. */
10639FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10640{
10641 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10642 IEMOP_HLP_MIN_386();
10643
10644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10645
10646 /** @todo Not entirely sure how the operand size prefix is handled here,
10647 * assuming that it will be ignored. Would be nice to have a few
10648 * tests for this. */
10649
10650 /** @todo There should be no difference in the behaviour whether REX.W is
10651 * present or not... */
10652
10653 /*
10654 * If rm is denoting a register, no more instruction bytes.
10655 */
10656 if (IEM_IS_MODRM_REG_MODE(bRm))
10657 {
10658 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10659 {
10660 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10662 IEM_MC_LOCAL(uint32_t, u32Value);
10663 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10664 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10665 IEM_MC_ADVANCE_RIP_AND_FINISH();
10666 IEM_MC_END();
10667 }
10668 else
10669 {
10670 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10672 IEM_MC_LOCAL(uint64_t, u64Value);
10673 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10674 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10675 IEM_MC_ADVANCE_RIP_AND_FINISH();
10676 IEM_MC_END();
10677 }
10678 }
10679 else
10680 {
10681 /*
10682 * We're loading a register from memory.
10683 */
10684 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10685 {
10686 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10687 IEM_MC_LOCAL(uint32_t, u32Value);
10688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10691 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10692 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10693 IEM_MC_ADVANCE_RIP_AND_FINISH();
10694 IEM_MC_END();
10695 }
10696 else
10697 {
10698 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
10699 IEM_MC_LOCAL(uint64_t, u64Value);
10700 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10703 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10704 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10705 IEM_MC_ADVANCE_RIP_AND_FINISH();
10706 IEM_MC_END();
10707 }
10708 }
10709}
10710
10711
10712/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10713FNIEMOP_UD_STUB(iemOp_jmpe);
10714
10715
10716/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10717FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10718{
10719 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10720 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10721 return iemOp_InvalidNeedRM(pVCpu);
10722#ifndef TST_IEM_CHECK_MC
10723# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10724 static const IEMOPBINSIZES s_Native =
10725 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10726# endif
10727 static const IEMOPBINSIZES s_Fallback =
10728 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10729#endif
10730 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10731 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
10732}
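/* The popcnt fallback workers are plain C; conceptually (sketch only; the
 * real implementations live in IEMAllAImplC.cpp and also update EFLAGS,
 * clearing everything except ZF, which is set when the source is zero):
 *
 *     uint16_t cBits = 0;
 *     while (uSrc)
 *     {
 *         uSrc &= uSrc - 1;   // strip the lowest set bit
 *         cBits++;
 *     }
 *     *puDst = cBits;
 */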
10733
10734
10735/**
10736 * @opcode 0xb9
10737 * @opinvalid intel-modrm
10738 * @optest ->
10739 */
10740FNIEMOP_DEF(iemOp_Grp10)
10741{
10742 /*
10743 * AMD does not decode beyond the 0xb9 byte, whereas Intel decodes the
10744 * ModR/M byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10745 */
10746 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10747 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10748 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10749}
10750
10751
10752/**
10753 * Body for group 8 bit instruction.
10754 */
10755#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10756 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10757 \
10758 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10759 { \
10760 /* register destination. */ \
10761 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10762 \
10763 switch (pVCpu->iem.s.enmEffOpSize) \
10764 { \
10765 case IEMMODE_16BIT: \
10766 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10768 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10769 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10770 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10771 \
10772 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10773 IEM_MC_REF_EFLAGS(pEFlags); \
10774 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10775 \
10776 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10777 IEM_MC_END(); \
10778 break; \
10779 \
10780 case IEMMODE_32BIT: \
10781 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10783 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10784 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10785 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10786 \
10787 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10788 IEM_MC_REF_EFLAGS(pEFlags); \
10789 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10790 \
10791 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10792 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10793 IEM_MC_END(); \
10794 break; \
10795 \
10796 case IEMMODE_64BIT: \
10797 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
10798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10799 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10800 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10801 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10802 \
10803 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10804 IEM_MC_REF_EFLAGS(pEFlags); \
10805 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10806 \
10807 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10808 IEM_MC_END(); \
10809 break; \
10810 \
10811 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10812 } \
10813 } \
10814 else \
10815 { \
10816 /* memory destination. */ \
10817 /** @todo test negative bit offsets! */ \
10818 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10819 { \
10820 switch (pVCpu->iem.s.enmEffOpSize) \
10821 { \
10822 case IEMMODE_16BIT: \
10823 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10826 \
10827 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10828 IEMOP_HLP_DONE_DECODING(); \
10829 \
10830 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10831 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10832 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10833 \
10834 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10835 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10836 IEM_MC_FETCH_EFLAGS(EFlags); \
10837 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10838 \
10839 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
10840 IEM_MC_COMMIT_EFLAGS(EFlags); \
10841 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10842 IEM_MC_END(); \
10843 break; \
10844 \
10845 case IEMMODE_32BIT: \
10846 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10849 \
10850 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10851 IEMOP_HLP_DONE_DECODING(); \
10852 \
10853 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10854 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10855 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10856 \
10857 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10858 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10859 IEM_MC_FETCH_EFLAGS(EFlags); \
10860 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10861 \
10862 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
10863 IEM_MC_COMMIT_EFLAGS(EFlags); \
10864 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10865 IEM_MC_END(); \
10866 break; \
10867 \
10868 case IEMMODE_64BIT: \
10869 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
10870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10871 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10872 \
10873 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10874 IEMOP_HLP_DONE_DECODING(); \
10875 \
10876 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10877 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10878 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10879 \
10880 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10881 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10882 IEM_MC_FETCH_EFLAGS(EFlags); \
10883 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10884 \
10885 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
10886 IEM_MC_COMMIT_EFLAGS(EFlags); \
10887 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10888 IEM_MC_END(); \
10889 break; \
10890 \
10891 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10892 } \
10893 } \
10894 else \
10895 { \
10896 (void)0
10897/* Separate macro to work around a parsing issue in IEMAllInstPython.py. */
10898#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10899 switch (pVCpu->iem.s.enmEffOpSize) \
10900 { \
10901 case IEMMODE_16BIT: \
10902 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10905 \
10906 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10907 IEMOP_HLP_DONE_DECODING(); \
10908 \
10909 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10910 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10911 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10912 \
10913 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10914 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10915 IEM_MC_FETCH_EFLAGS(EFlags); \
10916 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
10917 \
10918 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
10919 IEM_MC_COMMIT_EFLAGS(EFlags); \
10920 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10921 IEM_MC_END(); \
10922 break; \
10923 \
10924 case IEMMODE_32BIT: \
10925 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10926 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10928 \
10929 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10930 IEMOP_HLP_DONE_DECODING(); \
10931 \
10932 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10933 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10934 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10935 \
10936 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10937 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10938 IEM_MC_FETCH_EFLAGS(EFlags); \
10939 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
10940 \
10941 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
10942 IEM_MC_COMMIT_EFLAGS(EFlags); \
10943 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10944 IEM_MC_END(); \
10945 break; \
10946 \
10947 case IEMMODE_64BIT: \
10948 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
10949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10951 \
10952 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10953 IEMOP_HLP_DONE_DECODING(); \
10954 \
10955 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10956 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10957 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10958 \
10959 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10960 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10961 IEM_MC_FETCH_EFLAGS(EFlags); \
10962 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
10963 \
10964 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
10965 IEM_MC_COMMIT_EFLAGS(EFlags); \
10966 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10967 IEM_MC_END(); \
10968 break; \
10969 \
10970 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10971 } \
10972 } \
10973 } \
10974 (void)0
10975
10976/* Read-only version (bt) */
10977#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10978 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10979 \
10980 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10981 { \
10982 /* register destination. */ \
10983 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10984 \
10985 switch (pVCpu->iem.s.enmEffOpSize) \
10986 { \
10987 case IEMMODE_16BIT: \
10988 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10990 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
10991 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10992 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10993 \
10994 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10995 IEM_MC_REF_EFLAGS(pEFlags); \
10996 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10997 \
10998 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10999 IEM_MC_END(); \
11000 break; \
11001 \
11002 case IEMMODE_32BIT: \
11003 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
11004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11005 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
11006 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11007 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11008 \
11009 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11010 IEM_MC_REF_EFLAGS(pEFlags); \
11011 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11012 \
11013 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11014 IEM_MC_END(); \
11015 break; \
11016 \
11017 case IEMMODE_64BIT: \
11018 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
11019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11020 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
11021 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11022 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11023 \
11024 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11025 IEM_MC_REF_EFLAGS(pEFlags); \
11026 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11027 \
11028 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11029 IEM_MC_END(); \
11030 break; \
11031 \
11032 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11033 } \
11034 } \
11035 else \
11036 { \
11037 /* memory destination. */ \
11038 /** @todo test negative bit offsets! */ \
11039 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
11040 { \
11041 switch (pVCpu->iem.s.enmEffOpSize) \
11042 { \
11043 case IEMMODE_16BIT: \
11044 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11046 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11047 \
11048 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11049 IEMOP_HLP_DONE_DECODING(); \
11050 \
11051 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11052 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
11053 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11054 \
11055 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
11056 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11057 IEM_MC_FETCH_EFLAGS(EFlags); \
11058 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
11059 \
11060 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
11061 IEM_MC_COMMIT_EFLAGS(EFlags); \
11062 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11063 IEM_MC_END(); \
11064 break; \
11065 \
11066 case IEMMODE_32BIT: \
11067 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11068 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11070 \
11071 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11072 IEMOP_HLP_DONE_DECODING(); \
11073 \
11074 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11075 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
11076 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11077 \
11078 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11079 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11080 IEM_MC_FETCH_EFLAGS(EFlags); \
11081 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11082 \
11083 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
11084 IEM_MC_COMMIT_EFLAGS(EFlags); \
11085 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11086 IEM_MC_END(); \
11087 break; \
11088 \
11089 case IEMMODE_64BIT: \
11090 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
11091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11093 \
11094 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11095 IEMOP_HLP_DONE_DECODING(); \
11096 \
11097 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11098 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
11099 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11100 \
11101 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11102 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11103 IEM_MC_FETCH_EFLAGS(EFlags); \
11104 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11105 \
11106 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
11107 IEM_MC_COMMIT_EFLAGS(EFlags); \
11108 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11109 IEM_MC_END(); \
11110 break; \
11111 \
11112 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11113 } \
11114 } \
11115 else \
11116 { \
11117 IEMOP_HLP_DONE_DECODING(); \
11118 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11119 } \
11120 } \
11121 (void)0
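/* For orientation, a sketch (illustrative, not the actual assembly workers)
 * of what the immediate bit-test bodies above compute for 16-bit operands:
 *
 *     unsigned const iBit = u16Src & 15;   // the decoder masks bImm the same way
 *     if (*pu16Dst & RT_BIT_32(iBit))
 *         *pfEFlags |= X86_EFL_CF;         // CF = previous bit value
 *     else
 *         *pfEFlags &= ~X86_EFL_CF;
 *     // bts then sets the bit, btr clears it and btc toggles it; bt leaves
 *     // the destination untouched, hence the read-only mapping above.
 */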
11122
11123
11124/** Opcode 0x0f 0xba /4. */
11125FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11126{
11127 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11128 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11129}
11130
11131
11132/** Opcode 0x0f 0xba /5. */
11133FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11134{
11135 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11136 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11137 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11138}
11139
11140
11141/** Opcode 0x0f 0xba /6. */
11142FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11143{
11144 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11145 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11146 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11147}
11148
11149
11150/** Opcode 0x0f 0xba /7. */
11151FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11152{
11153 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11154 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11155 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11156}
11157
11158
11159/** Opcode 0x0f 0xba. */
11160FNIEMOP_DEF(iemOp_Grp8)
11161{
11162 IEMOP_HLP_MIN_386();
11163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11164 switch (IEM_GET_MODRM_REG_8(bRm))
11165 {
11166 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11167 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11168 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11169 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11170
11171 case 0: case 1: case 2: case 3:
11172 /* Both AMD and Intel want full modr/m decoding and imm8. */
11173 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11174
11175 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11176 }
11177}
11178
11179
11180/** Opcode 0x0f 0xbb. */
11181FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11182{
11183 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11184 IEMOP_HLP_MIN_386();
11185 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11186 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11187}
11188
11189
11190/**
11191 * Common worker for BSF and BSR instructions.
11192 *
11193 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11194 * the destination register, which means that for 32-bit operations the high
11195 * 32 bits of the 64-bit destination register must be left alone.
11196 *
11197 * @param pImpl Pointer to the instruction implementation (assembly).
11198 */
11199FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
11200{
11201 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11202
11203 /*
11204 * If rm is denoting a register, no more instruction bytes.
11205 */
11206 if (IEM_IS_MODRM_REG_MODE(bRm))
11207 {
11208 switch (pVCpu->iem.s.enmEffOpSize)
11209 {
11210 case IEMMODE_16BIT:
11211 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
11212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11213 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11214 IEM_MC_ARG(uint16_t, u16Src, 1);
11215 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11216
11217 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11218 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11219 IEM_MC_REF_EFLAGS(pEFlags);
11220 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11221
11222 IEM_MC_ADVANCE_RIP_AND_FINISH();
11223 IEM_MC_END();
11224 break;
11225
11226 case IEMMODE_32BIT:
11227 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
11228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11229 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11230 IEM_MC_ARG(uint32_t, u32Src, 1);
11231 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11232
11233 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11234 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11235 IEM_MC_REF_EFLAGS(pEFlags);
11236 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11237 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11238 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11239 } IEM_MC_ENDIF();
11240 IEM_MC_ADVANCE_RIP_AND_FINISH();
11241 IEM_MC_END();
11242 break;
11243
11244 case IEMMODE_64BIT:
11245 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
11246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11247 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11248 IEM_MC_ARG(uint64_t, u64Src, 1);
11249 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11250
11251 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11252 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11253 IEM_MC_REF_EFLAGS(pEFlags);
11254 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11255
11256 IEM_MC_ADVANCE_RIP_AND_FINISH();
11257 IEM_MC_END();
11258 break;
11259
11260 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11261 }
11262 }
11263 else
11264 {
11265 /*
11266 * We're accessing memory.
11267 */
11268 switch (pVCpu->iem.s.enmEffOpSize)
11269 {
11270 case IEMMODE_16BIT:
11271 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
11272 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11273 IEM_MC_ARG(uint16_t, u16Src, 1);
11274 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11276
11277 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11279 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11280 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11281 IEM_MC_REF_EFLAGS(pEFlags);
11282 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11283
11284 IEM_MC_ADVANCE_RIP_AND_FINISH();
11285 IEM_MC_END();
11286 break;
11287
11288 case IEMMODE_32BIT:
11289 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
11290 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11291 IEM_MC_ARG(uint32_t, u32Src, 1);
11292 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11294
11295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11297 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11298 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11299 IEM_MC_REF_EFLAGS(pEFlags);
11300 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11301
11302 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11303 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11304 } IEM_MC_ENDIF();
11305 IEM_MC_ADVANCE_RIP_AND_FINISH();
11306 IEM_MC_END();
11307 break;
11308
11309 case IEMMODE_64BIT:
11310 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
11311 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11312 IEM_MC_ARG(uint64_t, u64Src, 1);
11313 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11315
11316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11318 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11319 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11320 IEM_MC_REF_EFLAGS(pEFlags);
11321 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11322
11323 IEM_MC_ADVANCE_RIP_AND_FINISH();
11324 IEM_MC_END();
11325 break;
11326
11327 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11328 }
11329 }
11330}
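/* Behaviour sketch for the 32-bit cases above (illustrative, assuming the
 * "destination left unchanged on a zero source" behaviour the EFLAGS-variant
 * workers model): with RAX=0xdeadbeef00000000 and a source of 0x80, bsf
 * writes EAX=7 and the usual 32-bit zero-extension clears the high dword;
 * with a source of 0 it sets ZF and does not write the destination at all,
 * so the high dword survives, which is exactly why the clearing is guarded
 * by IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF). */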
11331
11332
11333/** Opcode 0x0f 0xbc. */
11334FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11335{
11336 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11337 IEMOP_HLP_MIN_386();
11338 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11339 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
11340}
11341
11342
11343/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
11344FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11345{
11346 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11347 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11348 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11349
11350#ifndef TST_IEM_CHECK_MC
11351 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11352 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11353 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11354 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11355 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11356 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11357 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11358 {
11359 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11360 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11361 };
11362#endif
11363 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11364 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11365 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11366 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
11367}
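/* Unlike bsf, tzcnt is fully defined for a zero source: tzcnt of 0 yields
 * the operand width (16/32/64) and sets CF, whereas tzcnt of e.g. 0x0008
 * yields 3 with CF clear; ZF reflects a zero result, not a zero source.
 * Pre-BMI1 CPUs ignore the F3 prefix, which is why a guest without BMI1 is
 * routed to iemOp_bsf_Gv_Ev above. */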
11368
11369
11370/** Opcode 0x0f 0xbd. */
11371FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11372{
11373 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11374 IEMOP_HLP_MIN_386();
11375 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11376 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
11377}
11378
11379
11380/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
11381FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11382{
11383 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11384 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11385 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11386
11387#ifndef TST_IEM_CHECK_MC
11388 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11389 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11390 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11391 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11392 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11393 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11394 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11395 {
11396 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11397 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11398 };
11399#endif
11400 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11401 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11402 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11403 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
11404}
11405
11406
11407
11408/** Opcode 0x0f 0xbe. */
11409FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11410{
11411 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11412 IEMOP_HLP_MIN_386();
11413
11414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11415
11416 /*
11417 * If rm is denoting a register, no more instruction bytes.
11418 */
11419 if (IEM_IS_MODRM_REG_MODE(bRm))
11420 {
11421 switch (pVCpu->iem.s.enmEffOpSize)
11422 {
11423 case IEMMODE_16BIT:
11424 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11426 IEM_MC_LOCAL(uint16_t, u16Value);
11427 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11428 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11429 IEM_MC_ADVANCE_RIP_AND_FINISH();
11430 IEM_MC_END();
11431 break;
11432
11433 case IEMMODE_32BIT:
11434 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11436 IEM_MC_LOCAL(uint32_t, u32Value);
11437 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11438 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11439 IEM_MC_ADVANCE_RIP_AND_FINISH();
11440 IEM_MC_END();
11441 break;
11442
11443 case IEMMODE_64BIT:
11444 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
11445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11446 IEM_MC_LOCAL(uint64_t, u64Value);
11447 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11448 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11449 IEM_MC_ADVANCE_RIP_AND_FINISH();
11450 IEM_MC_END();
11451 break;
11452
11453 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11454 }
11455 }
11456 else
11457 {
11458 /*
11459 * We're loading a register from memory.
11460 */
11461 switch (pVCpu->iem.s.enmEffOpSize)
11462 {
11463 case IEMMODE_16BIT:
11464 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11465 IEM_MC_LOCAL(uint16_t, u16Value);
11466 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11469 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11470 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11471 IEM_MC_ADVANCE_RIP_AND_FINISH();
11472 IEM_MC_END();
11473 break;
11474
11475 case IEMMODE_32BIT:
11476 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11477 IEM_MC_LOCAL(uint32_t, u32Value);
11478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11481 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11482 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11483 IEM_MC_ADVANCE_RIP_AND_FINISH();
11484 IEM_MC_END();
11485 break;
11486
11487 case IEMMODE_64BIT:
11488 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
11489 IEM_MC_LOCAL(uint64_t, u64Value);
11490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11493 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11494 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11495 IEM_MC_ADVANCE_RIP_AND_FINISH();
11496 IEM_MC_END();
11497 break;
11498
11499 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11500 }
11501 }
11502}
11503
11504
11505/** Opcode 0x0f 0xbf. */
11506FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11507{
11508 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11509 IEMOP_HLP_MIN_386();
11510
11511 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11512
11513 /** @todo Not entirely sure how the operand size prefix is handled here,
11514 * assuming that it will be ignored. Would be nice to have a few
11515 * tests for this. */
11516 /*
11517 * If rm is denoting a register, no more instruction bytes.
11518 */
11519 if (IEM_IS_MODRM_REG_MODE(bRm))
11520 {
11521 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11522 {
11523 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11525 IEM_MC_LOCAL(uint32_t, u32Value);
11526 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11527 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11528 IEM_MC_ADVANCE_RIP_AND_FINISH();
11529 IEM_MC_END();
11530 }
11531 else
11532 {
11533 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
11534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11535 IEM_MC_LOCAL(uint64_t, u64Value);
11536 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11537 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11538 IEM_MC_ADVANCE_RIP_AND_FINISH();
11539 IEM_MC_END();
11540 }
11541 }
11542 else
11543 {
11544 /*
11545 * We're loading a register from memory.
11546 */
11547 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11548 {
11549 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11550 IEM_MC_LOCAL(uint32_t, u32Value);
11551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11552 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11554 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11555 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11556 IEM_MC_ADVANCE_RIP_AND_FINISH();
11557 IEM_MC_END();
11558 }
11559 else
11560 {
11561 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
11562 IEM_MC_LOCAL(uint64_t, u64Value);
11563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11566 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11567 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11568 IEM_MC_ADVANCE_RIP_AND_FINISH();
11569 IEM_MC_END();
11570 }
11571 }
11572}
11573
11574
11575/** Opcode 0x0f 0xc0. */
11576FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11577{
11578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11579 IEMOP_HLP_MIN_486();
11580 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11581
11582 /*
11583 * If rm is denoting a register, no more instruction bytes.
11584 */
11585 if (IEM_IS_MODRM_REG_MODE(bRm))
11586 {
11587 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11589 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11590 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11591 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11592
11593 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11594 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11595 IEM_MC_REF_EFLAGS(pEFlags);
11596 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11597
11598 IEM_MC_ADVANCE_RIP_AND_FINISH();
11599 IEM_MC_END();
11600 }
11601 else
11602 {
11603 /*
11604 * We're accessing memory.
11605 */
11606 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0);
11607 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11608 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11609 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11610 IEM_MC_LOCAL(uint8_t, u8RegCopy);
11611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11612 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11613
11614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11615 IEMOP_HLP_DONE_DECODING();
11616 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11617 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11618 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
11619 IEM_MC_FETCH_EFLAGS(EFlags);
11620 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11621 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11622 else
11623 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
11624
11625 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
11626 IEM_MC_COMMIT_EFLAGS(EFlags);
11627 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
11628 IEM_MC_ADVANCE_RIP_AND_FINISH();
11629 IEM_MC_END();
11630 }
11631}
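/* XADD semantics sketch (illustrative; this is what iemAImpl_xadd_u8 boils
 * down to):
 *
 *     uint8_t const uTmp = *pu8Dst;
 *     *pu8Dst += *pu8Reg;          // EFLAGS updated as for ADD
 *     *pu8Reg  = uTmp;
 *
 * The memory form above therefore works on a local copy of the register and
 * only stores it back after the memory operand has been committed. */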
11632
11633
11634/** Opcode 0x0f 0xc1. */
11635FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11636{
11637 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11638 IEMOP_HLP_MIN_486();
11639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11640
11641 /*
11642 * If rm is denoting a register, no more instruction bytes.
11643 */
11644 if (IEM_IS_MODRM_REG_MODE(bRm))
11645 {
11646 switch (pVCpu->iem.s.enmEffOpSize)
11647 {
11648 case IEMMODE_16BIT:
11649 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11651 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11652 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11653 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11654
11655 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11656 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11657 IEM_MC_REF_EFLAGS(pEFlags);
11658 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11659
11660 IEM_MC_ADVANCE_RIP_AND_FINISH();
11661 IEM_MC_END();
11662 break;
11663
11664 case IEMMODE_32BIT:
11665 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11667 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11668 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11669 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11670
11671 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11672 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11673 IEM_MC_REF_EFLAGS(pEFlags);
11674 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11675
11676 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11677 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11678 IEM_MC_ADVANCE_RIP_AND_FINISH();
11679 IEM_MC_END();
11680 break;
11681
11682 case IEMMODE_64BIT:
11683 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
11684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11685 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11686 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11687 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11688
11689 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11690 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11691 IEM_MC_REF_EFLAGS(pEFlags);
11692 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11693
11694 IEM_MC_ADVANCE_RIP_AND_FINISH();
11695 IEM_MC_END();
11696 break;
11697
11698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11699 }
11700 }
11701 else
11702 {
11703 /*
11704 * We're accessing memory.
11705 */
11706 switch (pVCpu->iem.s.enmEffOpSize)
11707 {
11708 case IEMMODE_16BIT:
11709 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0);
11710 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11711 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11712 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11713 IEM_MC_LOCAL(uint16_t, u16RegCopy);
11714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11715 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11716
11717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11718 IEMOP_HLP_DONE_DECODING();
11719 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11720 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11721 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
11722 IEM_MC_FETCH_EFLAGS(EFlags);
11723 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11724 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11725 else
11726 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
11727
11728 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
11729 IEM_MC_COMMIT_EFLAGS(EFlags);
11730 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
11731 IEM_MC_ADVANCE_RIP_AND_FINISH();
11732 IEM_MC_END();
11733 break;
11734
11735 case IEMMODE_32BIT:
11736 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0);
11737 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11738 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11739 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11740 IEM_MC_LOCAL(uint32_t, u32RegCopy);
11741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11742 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11743
11744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11745 IEMOP_HLP_DONE_DECODING();
11746 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11747 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11748 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
11749 IEM_MC_FETCH_EFLAGS(EFlags);
11750 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11751 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11752 else
11753 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
11754
11755 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
11756 IEM_MC_COMMIT_EFLAGS(EFlags);
11757 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
11758 IEM_MC_ADVANCE_RIP_AND_FINISH();
11759 IEM_MC_END();
11760 break;
11761
11762 case IEMMODE_64BIT:
11763 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
11764 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11765 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11766 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11767 IEM_MC_LOCAL(uint64_t, u64RegCopy);
11768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11769 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11770
11771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11772 IEMOP_HLP_DONE_DECODING();
11773 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11774 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11775 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
11776 IEM_MC_FETCH_EFLAGS(EFlags);
11777 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11778 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11779 else
11780 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
11781
11782 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
11783 IEM_MC_COMMIT_EFLAGS(EFlags);
11784 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
11785 IEM_MC_ADVANCE_RIP_AND_FINISH();
11786 IEM_MC_END();
11787 break;
11788
11789 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11790 }
11791 }
11792}
11793
11794
11795/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11796FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11797{
11798 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11799
11800 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11801 if (IEM_IS_MODRM_REG_MODE(bRm))
11802 {
11803 /*
11804 * XMM, XMM.
11805 */
11806 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11807 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11809 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11810 IEM_MC_LOCAL(X86XMMREG, Dst);
11811 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11812 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11813 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11814 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11815 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11816 IEM_MC_PREPARE_SSE_USAGE();
11817 IEM_MC_REF_MXCSR(pfMxcsr);
11818 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11819 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11820 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11821 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11822 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11823 } IEM_MC_ELSE() {
11824 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11825 } IEM_MC_ENDIF();
11826
11827 IEM_MC_ADVANCE_RIP_AND_FINISH();
11828 IEM_MC_END();
11829 }
11830 else
11831 {
11832 /*
11833 * XMM, [mem128].
11834 */
11835 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
11836 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11837 IEM_MC_LOCAL(X86XMMREG, Dst);
11838 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11839 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11840 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11842
11843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11844 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11845 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11847 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11848 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11849
11850 IEM_MC_PREPARE_SSE_USAGE();
11851 IEM_MC_REF_MXCSR(pfMxcsr);
11852 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11853 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11854 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11855 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11856 } IEM_MC_ELSE() {
11857 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11858 } IEM_MC_ENDIF();
11859
11860 IEM_MC_ADVANCE_RIP_AND_FINISH();
11861 IEM_MC_END();
11862 }
11863}
11864
11865
11866/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11867FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11868{
11869 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11870
11871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11872 if (IEM_IS_MODRM_REG_MODE(bRm))
11873 {
11874 /*
11875 * XMM, XMM.
11876 */
11877 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11878 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11880 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11881 IEM_MC_LOCAL(X86XMMREG, Dst);
11882 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11883 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11884 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11885 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11886 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11887 IEM_MC_PREPARE_SSE_USAGE();
11888 IEM_MC_REF_MXCSR(pfMxcsr);
11889 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11890 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11891 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11892 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11893 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11894 } IEM_MC_ELSE() {
11895 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11896 } IEM_MC_ENDIF();
11897
11898 IEM_MC_ADVANCE_RIP_AND_FINISH();
11899 IEM_MC_END();
11900 }
11901 else
11902 {
11903 /*
11904 * XMM, [mem128].
11905 */
11906 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
11907 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11908 IEM_MC_LOCAL(X86XMMREG, Dst);
11909 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11910 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11911 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11912 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11913
11914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11915 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11916 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11918 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11919 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11920
11921 IEM_MC_PREPARE_SSE_USAGE();
11922 IEM_MC_REF_MXCSR(pfMxcsr);
11923 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11924 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11925 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11926 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11927 } IEM_MC_ELSE() {
11928 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11929 } IEM_MC_ENDIF();
11930
11931 IEM_MC_ADVANCE_RIP_AND_FINISH();
11932 IEM_MC_END();
11933 }
11934}
11935
11936
11937/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11938FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11939{
11940 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
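/* Only the low dword is compared; the upper 96 bits of the destination are preserved (see the XMM_U32 store below). */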
11941
11942 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11943 if (IEM_IS_MODRM_REG_MODE(bRm))
11944 {
11945 /*
11946 * XMM32, XMM32.
11947 */
11948 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11949 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11951 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11952 IEM_MC_LOCAL(X86XMMREG, Dst);
11953 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11954 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11955 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11956 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11957 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11958 IEM_MC_PREPARE_SSE_USAGE();
11959 IEM_MC_REF_MXCSR(pfMxcsr);
11960 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11961 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11962 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11963 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11964 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11965 } IEM_MC_ELSE() {
11966 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11967 } IEM_MC_ENDIF();
11968
11969 IEM_MC_ADVANCE_RIP_AND_FINISH();
11970 IEM_MC_END();
11971 }
11972 else
11973 {
11974 /*
11975 * XMM32, [mem32].
11976 */
11977 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
11978 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11979 IEM_MC_LOCAL(X86XMMREG, Dst);
11980 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11981 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11982 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11984
11985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11986 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11987 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11989 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11990 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11991
11992 IEM_MC_PREPARE_SSE_USAGE();
11993 IEM_MC_REF_MXCSR(pfMxcsr);
11994 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11995 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11996 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11997 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11998 } IEM_MC_ELSE() {
11999 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
12000 } IEM_MC_ENDIF();
12001
12002 IEM_MC_ADVANCE_RIP_AND_FINISH();
12003 IEM_MC_END();
12004 }
12005}
12006
12007
12008/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
12009FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
12010{
12011 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12012
12013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12014 if (IEM_IS_MODRM_REG_MODE(bRm))
12015 {
12016 /*
12017 * XMM64, XMM64.
12018 */
12019 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12020 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12022 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12023 IEM_MC_LOCAL(X86XMMREG, Dst);
12024 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12025 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12026 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12027 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12028 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12029 IEM_MC_PREPARE_SSE_USAGE();
12030 IEM_MC_REF_MXCSR(pfMxcsr);
12031 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
12032 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
12033 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12034 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12035 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12036 } IEM_MC_ELSE() {
12037 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12038 } IEM_MC_ENDIF();
12039
12040 IEM_MC_ADVANCE_RIP_AND_FINISH();
12041 IEM_MC_END();
12042 }
12043 else
12044 {
12045 /*
12046 * XMM64, [mem64].
12047 */
12048 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
12049 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12050 IEM_MC_LOCAL(X86XMMREG, Dst);
12051 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12052 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12053 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12055
12056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12057 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12058 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12060 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12061 IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12062
12063 IEM_MC_PREPARE_SSE_USAGE();
12064 IEM_MC_REF_MXCSR(pfMxcsr);
12065 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
12066 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12067 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12068 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12069 } IEM_MC_ELSE() {
12070 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12071 } IEM_MC_ENDIF();
12072
12073 IEM_MC_ADVANCE_RIP_AND_FINISH();
12074 IEM_MC_END();
12075 }
12076}
12077
12078
12079/** Opcode 0x0f 0xc3. */
12080FNIEMOP_DEF(iemOp_movnti_My_Gy)
12081{
12082 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
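/* Note! The non-temporal hint is not modeled here; the store below behaves like a plain MOV. */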
12083
12084 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12085
12086 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
12087 if (IEM_IS_MODRM_MEM_MODE(bRm))
12088 {
12089 switch (pVCpu->iem.s.enmEffOpSize)
12090 {
12091 case IEMMODE_32BIT:
12092 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
12093 IEM_MC_LOCAL(uint32_t, u32Value);
12094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12095
12096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12098
12099 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12100 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
12101 IEM_MC_ADVANCE_RIP_AND_FINISH();
12102 IEM_MC_END();
12103 break;
12104
12105 case IEMMODE_64BIT:
12106 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
12107 IEM_MC_LOCAL(uint64_t, u64Value);
12108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12109
12110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12112
12113 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12114 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
12115 IEM_MC_ADVANCE_RIP_AND_FINISH();
12116 IEM_MC_END();
12117 break;
12118
12119 case IEMMODE_16BIT:
12120 /** @todo check this form. */
12121 IEMOP_RAISE_INVALID_OPCODE_RET();
12122
12123 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12124 }
12125 }
12126 else
12127 IEMOP_RAISE_INVALID_OPCODE_RET();
12128}
12129
12130
12131/* Opcode 0x66 0x0f 0xc3 - invalid */
12132/* Opcode 0xf3 0x0f 0xc3 - invalid */
12133/* Opcode 0xf2 0x0f 0xc3 - invalid */
12134
12135
12136/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12137FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12138{
12139 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12141 if (IEM_IS_MODRM_REG_MODE(bRm))
12142 {
12143 /*
12144 * Register, register.
12145 */
12146 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12147 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12149 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12150 IEM_MC_ARG(uint16_t, u16Src, 1);
12151 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12152 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12153 IEM_MC_PREPARE_FPU_USAGE();
12154 IEM_MC_FPU_TO_MMX_MODE();
12155 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12156 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12157 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12158 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12159 IEM_MC_ADVANCE_RIP_AND_FINISH();
12160 IEM_MC_END();
12161 }
12162 else
12163 {
12164 /*
12165 * Register, memory.
12166 */
12167 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12168 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12169 IEM_MC_ARG(uint16_t, u16Src, 1);
12170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12171
12172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12173 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12174 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12176 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12177 IEM_MC_PREPARE_FPU_USAGE();
12178 IEM_MC_FPU_TO_MMX_MODE();
12179
12180 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12181 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12182 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12183 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12184 IEM_MC_ADVANCE_RIP_AND_FINISH();
12185 IEM_MC_END();
12186 }
12187}
12188
12189
12190/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12191FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12192{
12193 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12194 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12195 if (IEM_IS_MODRM_REG_MODE(bRm))
12196 {
12197 /*
12198 * Register, register.
12199 */
12200 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12201 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12203 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12204 IEM_MC_ARG(uint16_t, u16Src, 1);
12205 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12206 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12207 IEM_MC_PREPARE_SSE_USAGE();
12208 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12209 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12210 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12211 IEM_MC_ADVANCE_RIP_AND_FINISH();
12212 IEM_MC_END();
12213 }
12214 else
12215 {
12216 /*
12217 * Register, memory.
12218 */
12219 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12220 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12221 IEM_MC_ARG(uint16_t, u16Src, 1);
12222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12223
12224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12225 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12226 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12228 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12229 IEM_MC_PREPARE_SSE_USAGE();
12230
12231 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12232 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12233 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12234 IEM_MC_ADVANCE_RIP_AND_FINISH();
12235 IEM_MC_END();
12236 }
12237}
12238
12239
12240/* Opcode 0xf3 0x0f 0xc4 - invalid */
12241/* Opcode 0xf2 0x0f 0xc4 - invalid */
12242
12243
12244/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12245FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12246{
12247 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12248 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12249 if (IEM_IS_MODRM_REG_MODE(bRm))
12250 {
12251 /*
12252 * Greg32, MMX, imm8.
12253 */
12254 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12255 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12257 IEM_MC_LOCAL(uint16_t, u16Dst);
12258 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12259 IEM_MC_ARG(uint64_t, u64Src, 1);
12260 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12261 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12262 IEM_MC_PREPARE_FPU_USAGE();
12263 IEM_MC_FPU_TO_MMX_MODE();
12264 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
12265 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
12266 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12267 IEM_MC_ADVANCE_RIP_AND_FINISH();
12268 IEM_MC_END();
12269 }
12270 /* No memory operand. */
12271 else
12272 IEMOP_RAISE_INVALID_OPCODE_RET();
12273}
12274
12275
12276/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12277FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12278{
12279 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12280 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12281 if (IEM_IS_MODRM_REG_MODE(bRm))
12282 {
12283 /*
12284 * Greg32, XMM, imm8.
12285 */
12286 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12287 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12289 IEM_MC_LOCAL(uint16_t, u16Dst);
12290 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12291 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12292 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12293 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12294 IEM_MC_PREPARE_SSE_USAGE();
12295 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12296 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
12297 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12298 IEM_MC_ADVANCE_RIP_AND_FINISH();
12299 IEM_MC_END();
12300 }
12301 /* No memory operand. */
12302 else
12303 IEMOP_RAISE_INVALID_OPCODE_RET();
12304}
12305
12306
12307/* Opcode 0xf3 0x0f 0xc5 - invalid */
12308/* Opcode 0xf2 0x0f 0xc5 - invalid */
12309
12310
12311/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12312FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12313{
12314 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
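/* The imm8 selects two dwords from the destination (bits 1:0 and 3:2) and two from the source (bits 5:4 and 7:6). */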
12315 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12316 if (IEM_IS_MODRM_REG_MODE(bRm))
12317 {
12318 /*
12319 * XMM, XMM, imm8.
12320 */
12321 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12322 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12324 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12325 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12326 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12327 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12328 IEM_MC_PREPARE_SSE_USAGE();
12329 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12330 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12331 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12332 IEM_MC_ADVANCE_RIP_AND_FINISH();
12333 IEM_MC_END();
12334 }
12335 else
12336 {
12337 /*
12338 * XMM, [mem128], imm8.
12339 */
12340 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12341 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12342 IEM_MC_LOCAL(RTUINT128U, uSrc);
12343 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12345
12346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12347 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12348 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12350 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12351 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12352
12353 IEM_MC_PREPARE_SSE_USAGE();
12354 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12355 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12356
12357 IEM_MC_ADVANCE_RIP_AND_FINISH();
12358 IEM_MC_END();
12359 }
12360}
12361
12362
12363/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12364FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12365{
12366 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12368 if (IEM_IS_MODRM_REG_MODE(bRm))
12369 {
12370 /*
12371 * XMM, XMM, imm8.
12372 */
12373 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12374 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12376 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12377 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12378 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12379 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12380 IEM_MC_PREPARE_SSE_USAGE();
12381 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12382 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12383 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12384 IEM_MC_ADVANCE_RIP_AND_FINISH();
12385 IEM_MC_END();
12386 }
12387 else
12388 {
12389 /*
12390 * XMM, [mem128], imm8.
12391 */
12392 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12393 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12394 IEM_MC_LOCAL(RTUINT128U, uSrc);
12395 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12397
12398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12399 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12400 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12402 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12403 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12404
12405 IEM_MC_PREPARE_SSE_USAGE();
12406 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12407 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12408
12409 IEM_MC_ADVANCE_RIP_AND_FINISH();
12410 IEM_MC_END();
12411 }
12412}
12413
12414
12415/* Opcode 0xf3 0x0f 0xc6 - invalid */
12416/* Opcode 0xf2 0x0f 0xc6 - invalid */
12417
12418
12419/** Opcode 0x0f 0xc7 !11/1. */
12420FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12421{
12422 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
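/* Compares EDX:EAX with the 64-bit destination; on a match ZF is set and ECX:EBX is stored, otherwise ZF is cleared and the memory value is loaded into EDX:EAX. */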
12423
12424 IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER, 0);
12425 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
12426 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
12427 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
12428 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12429 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
12430 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
12431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12432 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12433
12434 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12435 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b);
12436 IEM_MC_MEM_MAP_U64_RW(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12437
12438 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
12439 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
12440 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
12441
12442 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
12443 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
12444 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
12445
12446 IEM_MC_FETCH_EFLAGS(EFlags);
12447 if ( !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
12448 && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12449 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12450 else
12451 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12452
12453 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64MemDst, bUnmapInfo);
12454 IEM_MC_COMMIT_EFLAGS(EFlags);
12455 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12456 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
12457 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
12458 } IEM_MC_ENDIF();
12459 IEM_MC_ADVANCE_RIP_AND_FINISH();
12460
12461 IEM_MC_END();
12462}
12463
12464
12465/** Opcode REX.W 0x0f 0xc7 !11/1. */
12466FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12467{
12468 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12469 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12470 {
12471 /*
12472 * This is hairy, very hairy macro fun. We're walking a fine line
12473 * here to make the code parsable by IEMAllInstPython.py and fit into
12474 * the patterns IEMAllThrdPython.py requires for the code morphing.
12475 */
12476#define BODY_CMPXCHG16B_HEAD \
12477 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0); \
12478 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12479 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1); \
12480 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2); \
12481 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3); \
12482 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12483 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12484 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12485 \
12486 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12487 IEMOP_HLP_DONE_DECODING(); \
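/* CMPXCHG16B requires a 16-byte aligned memory operand and raises #GP(0) otherwise: */ \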
12488 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12489 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
12490 \
12491 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX); \
12492 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX); \
12493 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx); \
12494 \
12495 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX); \
12496 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX); \
12497 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx); \
12498 \
12499 IEM_MC_FETCH_EFLAGS(EFlags); \
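/* Hint that the worker may modify RAX/RDX behind our back, so any stale guest register shadows get flushed when recompiling to native code: */ \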
12500 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xAX); \
12501 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xDX)
12502
12503#define BODY_CMPXCHG16B_TAIL \
12504 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW); \
12505 IEM_MC_COMMIT_EFLAGS(EFlags); \
12506 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12507 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo); \
12508 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi); \
12509 } IEM_MC_ENDIF(); \
12510 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12511 IEM_MC_END()
12512
12513#ifdef RT_ARCH_AMD64
12514 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12515 {
12516 if ( !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
12517 && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12518 {
12519 BODY_CMPXCHG16B_HEAD;
12520 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12521 BODY_CMPXCHG16B_TAIL;
12522 }
12523 else
12524 {
12525 BODY_CMPXCHG16B_HEAD;
12526 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12527 BODY_CMPXCHG16B_TAIL;
12528 }
12529 }
12530 else
12531 { /* (see comments in #else case below) */
12532 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12533 {
12534 BODY_CMPXCHG16B_HEAD;
12535 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12536 BODY_CMPXCHG16B_TAIL;
12537 }
12538 else
12539 {
12540 BODY_CMPXCHG16B_HEAD;
12541 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12542 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12543 IEM_MC_END();
12544 }
12545 }
12546
12547#elif defined(RT_ARCH_ARM64)
12548 /** @todo may require fallback for unaligned accesses... */
12549 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12550 {
12551 BODY_CMPXCHG16B_HEAD;
12552 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12553 BODY_CMPXCHG16B_TAIL;
12554 }
12555 else
12556 {
12557 BODY_CMPXCHG16B_HEAD;
12558 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12559 BODY_CMPXCHG16B_TAIL;
12560 }
12561
12562#else
12563 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
12564 accesses and is not at all atomic, which works fine in a uni-CPU guest
12565 configuration (ignoring DMA). If guest SMP is active we have no choice
12566 but to use a rendezvous callback here. Sigh. */
12567 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12568 {
12569 BODY_CMPXCHG16B_HEAD;
12570 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12571 BODY_CMPXCHG16B_TAIL;
12572 }
12573 else
12574 {
12575 BODY_CMPXCHG16B_HEAD;
12576 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_cmpxchg16b_fallback_rendezvous,
12577 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12578 IEM_MC_END();
12579 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12580 }
12581#endif
12582
12583 #undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
12584 }
12585 Log(("cmpxchg16b -> #UD\n"));
12586 IEMOP_RAISE_INVALID_OPCODE_RET();
12587}
12588
12589FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12590{
12591 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12592 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12593 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12594}
12595
12596
12597/** Opcode 0x0f 0xc7 11/6. */
12598FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12599{
12600 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12601 IEMOP_RAISE_INVALID_OPCODE_RET();
12602
12603 if (IEM_IS_MODRM_REG_MODE(bRm))
12604 {
12605 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12607 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12608 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12609 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_RM(pVCpu, bRm));
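/* RDRAND sets CF=1 when it returns a valid random value and clears OF/SF/ZF/AF/PF, hence IEM_CIMPL_F_RFLAGS. */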
12610 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_rdrand, iReg, enmEffOpSize);
12611 IEM_MC_END();
12612 }
12613 /* Register only. */
12614 else
12615 IEMOP_RAISE_INVALID_OPCODE_RET();
12616}
12617
12618/** Opcode 0x0f 0xc7 !11/6. */
12619#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12620FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12621{
12622 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12623 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12624 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12625 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12626 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12628 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12629 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12630 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12631 IEM_MC_END();
12632}
12633#else
12634FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12635#endif
12636
12637/** Opcode 0x66 0x0f 0xc7 !11/6. */
12638#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12639FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12640{
12641 IEMOP_MNEMONIC(vmclear, "vmclear");
12642 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12643 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12644 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12645 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12646 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12647 IEMOP_HLP_DONE_DECODING();
12648 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12649 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12650 IEM_MC_END();
12651}
12652#else
12653FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12654#endif
12655
12656/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12657#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12658FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12659{
12660 IEMOP_MNEMONIC(vmxon, "vmxon");
12661 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12662 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12663 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12665 IEMOP_HLP_DONE_DECODING();
12666 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12667 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12668 IEM_MC_END();
12669}
12670#else
12671FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12672#endif
12673
12674/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12675#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12676FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12677{
12678 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12679 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12680 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12681 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12682 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12684 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12685 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12686 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12687 IEM_MC_END();
12688}
12689#else
12690FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12691#endif
12692
12693/** Opcode 0x0f 0xc7 11/7. */
12694FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12695{
12696 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12697 IEMOP_RAISE_INVALID_OPCODE_RET();
12698
12699 if (IEM_IS_MODRM_REG_MODE(bRm))
12700 {
12701 /* register destination. */
12702 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12704 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12705 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12706 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_RM(pVCpu, bRm));
12707 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_rdseed, iReg, enmEffOpSize);
12708 IEM_MC_END();
12709 }
12710 /* Register only. */
12711 else
12712 IEMOP_RAISE_INVALID_OPCODE_RET();
12713}
12714
12715/**
12716 * Group 9 jump table for register variant.
12717 */
12718IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12719{ /* pfx: none, 066h, 0f3h, 0f2h */
12720 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12721 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12722 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12723 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12724 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12725 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12726 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12727 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12728};
12729AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12730
12731
12732/**
12733 * Group 9 jump table for memory variant.
12734 */
12735IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12736{ /* pfx: none, 066h, 0f3h, 0f2h */
12737 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12738 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12739 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12740 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12741 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12742 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12743 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12744 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12745};
12746AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12747
12748
12749/** Opcode 0x0f 0xc7. */
12750FNIEMOP_DEF(iemOp_Grp9)
12751{
12752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
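/* The tables below have four entries per /r value, one for each prefix (none, 066h, 0f3h, 0f2h), so index by reg*4 + active prefix. */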
12753 if (IEM_IS_MODRM_REG_MODE(bRm))
12754 /* register, register */
12755 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12756 + pVCpu->iem.s.idxPrefix], bRm);
12757 /* memory, register */
12758 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12759 + pVCpu->iem.s.idxPrefix], bRm);
12760}
12761
12762
12763/**
12764 * Common 'bswap register' helper.
12765 */
12766FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12767{
12768 switch (pVCpu->iem.s.enmEffOpSize)
12769 {
12770 case IEMMODE_16BIT:
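/* Note! BSWAP with a 16-bit operand is documented as undefined; see iemAImpl_bswap_u16 for the behavior we emulate. */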
12771 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486, 0);
12772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12773 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12774 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12775 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12776 IEM_MC_ADVANCE_RIP_AND_FINISH();
12777 IEM_MC_END();
12778 break;
12779
12780 case IEMMODE_32BIT:
12781 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486, 0);
12782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12783 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12784 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12785 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12786 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
12787 IEM_MC_ADVANCE_RIP_AND_FINISH();
12788 IEM_MC_END();
12789 break;
12790
12791 case IEMMODE_64BIT:
12792 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
12793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12794 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12795 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12796 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12797 IEM_MC_ADVANCE_RIP_AND_FINISH();
12798 IEM_MC_END();
12799 break;
12800
12801 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12802 }
12803}
12804
12805
12806/** Opcode 0x0f 0xc8. */
12807FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12808{
12809 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12810 /* Note! The Intel manual states that R8-R15 can be accessed by using a REX.X
12811 prefix, but REX.B appears to be the correct prefix. For a parallel
12812 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12813 IEMOP_HLP_MIN_486();
12814 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12815}
12816
12817
12818/** Opcode 0x0f 0xc9. */
12819FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12820{
12821 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12822 IEMOP_HLP_MIN_486();
12823 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12824}
12825
12826
12827/** Opcode 0x0f 0xca. */
12828FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12829{
12830 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12831 IEMOP_HLP_MIN_486();
12832 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12833}
12834
12835
12836/** Opcode 0x0f 0xcb. */
12837FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12838{
12839 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12840 IEMOP_HLP_MIN_486();
12841 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12842}
12843
12844
12845/** Opcode 0x0f 0xcc. */
12846FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12847{
12848 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12849 IEMOP_HLP_MIN_486();
12850 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12851}
12852
12853
12854/** Opcode 0x0f 0xcd. */
12855FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12856{
12857 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12858 IEMOP_HLP_MIN_486();
12859 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12860}
12861
12862
12863/** Opcode 0x0f 0xce. */
12864FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12865{
12866 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12867 IEMOP_HLP_MIN_486();
12868 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12869}
12870
12871
12872/** Opcode 0x0f 0xcf. */
12873FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12874{
12875 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12876 IEMOP_HLP_MIN_486();
12877 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12878}
12879
12880
12881/* Opcode 0x0f 0xd0 - invalid */
12882
12883
12884/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12885FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12886{
12887 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
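/* SSE3: subtracts in the low lane and adds in the high lane of the 128-bit operands. */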
12888 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12889}
12890
12891
12892/* Opcode 0xf3 0x0f 0xd0 - invalid */
12893
12894
12895/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12896FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12897{
12898 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12899 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12900}
12901
12902
12903
12904/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12905FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12906{
12907 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12908 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12909}
12910
12911/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12912FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12913{
12914 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12915 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12916}
12917
12918/* Opcode 0xf3 0x0f 0xd1 - invalid */
12919/* Opcode 0xf2 0x0f 0xd1 - invalid */
12920
12921/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12922FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12923{
12924 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12925 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12926}
12927
12928
12929/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12930FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12931{
12932 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12933 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12934}
12935
12936
12937/* Opcode 0xf3 0x0f 0xd2 - invalid */
12938/* Opcode 0xf2 0x0f 0xd2 - invalid */
12939
12940/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12941FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12942{
12943 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12944 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12945}
12946
12947
12948/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12949FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12950{
12951 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12952 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12953}
12954
12955
12956/* Opcode 0xf3 0x0f 0xd3 - invalid */
12957/* Opcode 0xf2 0x0f 0xd3 - invalid */
12958
12959
12960/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12961FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12962{
12963 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12964 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12965}
12966
12967
12968/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12969FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12970{
12971 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12972 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12973}
12974
12975
12976/* Opcode 0xf3 0x0f 0xd4 - invalid */
12977/* Opcode 0xf2 0x0f 0xd4 - invalid */
12978
12979/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12980FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12981{
12982 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12983 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12984}
12985
12986/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12987FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12988{
12989 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12990 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12991}
12992
12993
12994/* Opcode 0xf3 0x0f 0xd5 - invalid */
12995/* Opcode 0xf2 0x0f 0xd5 - invalid */
12996
12997/* Opcode 0x0f 0xd6 - invalid */
12998
12999/**
13000 * @opcode 0xd6
13001 * @oppfx 0x66
13002 * @opcpuid sse2
13003 * @opgroup og_sse2_pcksclr_datamove
13004 * @opxcpttype none
13005 * @optest op1=-1 op2=2 -> op1=2
13006 * @optest op1=0 op2=-42 -> op1=-42
13007 */
13008FNIEMOP_DEF(iemOp_movq_Wq_Vq)
13009{
13010 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13011 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13012 if (IEM_IS_MODRM_REG_MODE(bRm))
13013 {
13014 /*
13015 * Register, register.
13016 */
13017 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13019 IEM_MC_LOCAL(uint64_t, uSrc);
13020
13021 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13022 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13023
13024 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
13025 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
13026
13027 IEM_MC_ADVANCE_RIP_AND_FINISH();
13028 IEM_MC_END();
13029 }
13030 else
13031 {
13032 /*
13033 * Memory, register.
13034 */
13035 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13036 IEM_MC_LOCAL(uint64_t, uSrc);
13037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13038
13039 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13041 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13042 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13043
13044 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
13045 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13046
13047 IEM_MC_ADVANCE_RIP_AND_FINISH();
13048 IEM_MC_END();
13049 }
13050}
13051
13052
13053/**
13054 * @opcode 0xd6
13055 * @opcodesub 11 mr/reg
13056 * @oppfx f3
13057 * @opcpuid sse2
13058 * @opgroup og_sse2_simdint_datamove
13059 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13060 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13061 */
13062FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
13063{
13064 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13065 if (IEM_IS_MODRM_REG_MODE(bRm))
13066 {
13067 /*
13068 * Register, register.
13069 */
13070 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13071 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
13072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13073 IEM_MC_LOCAL(uint64_t, uSrc);
13074
13075 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13076 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13077 IEM_MC_FPU_TO_MMX_MODE();
13078
13079 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13080 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13081
13082 IEM_MC_ADVANCE_RIP_AND_FINISH();
13083 IEM_MC_END();
13084 }
13085
13086 /**
13087 * @opdone
13088 * @opmnemonic udf30fd6mem
13089 * @opcode 0xd6
13090 * @opcodesub !11 mr/reg
13091 * @oppfx f3
13092 * @opunused intel-modrm
13093 * @opcpuid sse
13094 * @optest ->
13095 */
13096 else
13097 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13098}
13099
13100
13101/**
13102 * @opcode 0xd6
13103 * @opcodesub 11 mr/reg
13104 * @oppfx f2
13105 * @opcpuid sse2
13106 * @opgroup og_sse2_simdint_datamove
13107 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13108 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13109 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13110 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13111 * @optest op1=-42 op2=0xfedcba9876543210
13112 * -> op1=0xfedcba9876543210 ftw=0xff
13113 */
13114FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13115{
13116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13117 if (IEM_IS_MODRM_REG_MODE(bRm))
13118 {
13119 /*
13120 * Register, register.
13121 */
13122 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13123 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
13124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13125 IEM_MC_LOCAL(uint64_t, uSrc);
13126
13127 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13128 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13129 IEM_MC_FPU_TO_MMX_MODE();
13130
13131 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13132 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13133
13134 IEM_MC_ADVANCE_RIP_AND_FINISH();
13135 IEM_MC_END();
13136 }
13137
13138 /**
13139 * @opdone
13140 * @opmnemonic udf20fd6mem
13141 * @opcode 0xd6
13142 * @opcodesub !11 mr/reg
13143 * @oppfx f2
13144 * @opunused intel-modrm
13145 * @opcpuid sse
13146 * @optest ->
13147 */
13148 else
13149 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13150}
13151
13152
13153/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13154FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13155{
13156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13157 /* Docs say register only. */
13158 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13159 {
13160 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13161 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
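/* Gathers the most significant bit of each source byte into the low bits of the destination. */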
13162 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
13163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13164 IEM_MC_ARG(uint64_t *, puDst, 0);
13165 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13166 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13167 IEM_MC_PREPARE_FPU_USAGE();
13168 IEM_MC_FPU_TO_MMX_MODE();
13169
13170 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13171 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13172 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13173
13174 IEM_MC_ADVANCE_RIP_AND_FINISH();
13175 IEM_MC_END();
13176 }
13177 else
13178 IEMOP_RAISE_INVALID_OPCODE_RET();
13179}
13180
13181
13182 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
13183FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13184{
13185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13186 /* Docs say register only. */
13187 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13188 {
13189 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13190 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13191 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
13192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13193 IEM_MC_ARG(uint64_t *, puDst, 0);
13194 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13195 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13196 IEM_MC_PREPARE_SSE_USAGE();
13197 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13198 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13199 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13200 IEM_MC_ADVANCE_RIP_AND_FINISH();
13201 IEM_MC_END();
13202 }
13203 else
13204 IEMOP_RAISE_INVALID_OPCODE_RET();
13205}
13206
13207
13208/* Opcode 0xf3 0x0f 0xd7 - invalid */
13209/* Opcode 0xf2 0x0f 0xd7 - invalid */
13210
13211
13212/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13213FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13214{
13215 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13216 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
13217}
13218
13219
13220/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13221FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13222{
13223 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13224 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
13225}
13226
13227
13228/* Opcode 0xf3 0x0f 0xd8 - invalid */
13229/* Opcode 0xf2 0x0f 0xd8 - invalid */
13230
13231/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13232FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13233{
13234 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13235 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
13236}
13237
13238
13239/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13240FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13241{
13242 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13243 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
13244}
13245
13246
13247/* Opcode 0xf3 0x0f 0xd9 - invalid */
13248/* Opcode 0xf2 0x0f 0xd9 - invalid */
13249
13250/** Opcode 0x0f 0xda - pminub Pq, Qq */
13251FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13252{
13253 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13254 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
13255}
13256
13257
13258/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13259FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13260{
13261 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13262 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
13263}
13264
13265/* Opcode 0xf3 0x0f 0xda - invalid */
13266/* Opcode 0xf2 0x0f 0xda - invalid */
13267
13268/** Opcode 0x0f 0xdb - pand Pq, Qq */
13269FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13270{
13271 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13272 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
13273}
13274
13275
13276/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13277FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13278{
13279 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13280 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
13281}
13282
13283
13284/* Opcode 0xf3 0x0f 0xdb - invalid */
13285/* Opcode 0xf2 0x0f 0xdb - invalid */
13286
13287/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13288FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13289{
13290 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13291 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
13292}
13293
13294
13295/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13296FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13297{
13298 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13299 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
13300}
13301
13302
13303/* Opcode 0xf3 0x0f 0xdc - invalid */
13304/* Opcode 0xf2 0x0f 0xdc - invalid */
13305
13306/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13307FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13308{
13309 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13310 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
13311}
13312
13313
13314/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13315FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13316{
13317 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13318 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
13319}
13320
13321
13322/* Opcode 0xf3 0x0f 0xdd - invalid */
13323/* Opcode 0xf2 0x0f 0xdd - invalid */
13324
13325/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13326FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13327{
13328 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13329 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
13330}
13331
13332
13333 /** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13334FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13335{
13336 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13337 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
13338}
13339
13340/* Opcode 0xf3 0x0f 0xde - invalid */
13341/* Opcode 0xf2 0x0f 0xde - invalid */
13342
13343
13344/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13345FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13346{
13347 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13348 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
13349}
13350
13351
13352/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13353FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13354{
13355 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13356 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
13357}
13358
13359
13360/* Opcode 0xf3 0x0f 0xdf - invalid */
13361/* Opcode 0xf2 0x0f 0xdf - invalid */
13362
13363/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13364FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13365{
13366 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13367 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13368}
13369
13370
13371/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13372FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13373{
13374 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13375 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13376}
13377
13378
13379/* Opcode 0xf3 0x0f 0xe0 - invalid */
13380/* Opcode 0xf2 0x0f 0xe0 - invalid */
13381
13382/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13383FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13384{
13385 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13386 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13387}
13388
13389
13390/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13391FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13392{
13393 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13394 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13395}
13396
13397
13398/* Opcode 0xf3 0x0f 0xe1 - invalid */
13399/* Opcode 0xf2 0x0f 0xe1 - invalid */
13400
13401/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13402FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13403{
13404 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13405 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13406}
13407
13408
13409/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13410FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13411{
13412 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13413 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13414}
13415
13416
13417/* Opcode 0xf3 0x0f 0xe2 - invalid */
13418/* Opcode 0xf2 0x0f 0xe2 - invalid */
13419
13420/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13421FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13422{
13423 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13424 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13425}
13426
13427
13428/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13429FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13430{
13431 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13432 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13433}
13434
13435
13436/* Opcode 0xf3 0x0f 0xe3 - invalid */
13437/* Opcode 0xf2 0x0f 0xe3 - invalid */
13438
13439/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13440FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13441{
13442 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13443 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13444}
13445
13446
13447/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13448FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13449{
13450 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13451 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13452}
13453
13454
13455/* Opcode 0xf3 0x0f 0xe4 - invalid */
13456/* Opcode 0xf2 0x0f 0xe4 - invalid */
13457
13458/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13459FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13460{
13461 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13462 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13463}
13464
13465
13466/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13467FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13468{
13469 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13470 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13471}
13472
13473
13474/* Opcode 0xf3 0x0f 0xe5 - invalid */
13475/* Opcode 0xf2 0x0f 0xe5 - invalid */
13476/* Opcode 0x0f 0xe6 - invalid */
13477
13478
13479/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13480FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13481{
13482 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13483 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13484}
13485
13486
13487/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13488FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13489{
13490 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13491 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13492}
13493
13494
13495/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13496FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13497{
13498 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13499 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13500}
13501
13502
13503/**
13504 * @opcode 0xe7
13505 * @opcodesub !11 mr/reg
13506 * @oppfx none
13507 * @opcpuid sse
13508 * @opgroup og_sse1_cachect
13509 * @opxcpttype none
13510 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13511 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13512 */
13513FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13514{
13515 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13516 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13517 if (IEM_IS_MODRM_MEM_MODE(bRm))
13518 {
13519 /* Register, memory. */
13520 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13521 IEM_MC_LOCAL(uint64_t, uSrc);
13522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13523
13524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13526 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13527 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
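 /* Like all MMX register accesses, this switches the FPU to MMX mode,
    tagging all eight registers as valid (the ftw=0xff in the optests above). */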
13528 IEM_MC_FPU_TO_MMX_MODE();
13529
13530 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13531 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13532
13533 IEM_MC_ADVANCE_RIP_AND_FINISH();
13534 IEM_MC_END();
13535 }
13536 /**
13537 * @opdone
13538 * @opmnemonic ud0fe7reg
13539 * @opcode 0xe7
13540 * @opcodesub 11 mr/reg
13541 * @oppfx none
13542 * @opunused immediate
13543 * @opcpuid sse
13544 * @optest ->
13545 */
13546 else
13547 IEMOP_RAISE_INVALID_OPCODE_RET();
13548}
13549
13550/**
13551 * @opcode 0xe7
13552 * @opcodesub !11 mr/reg
13553 * @oppfx 0x66
13554 * @opcpuid sse2
13555 * @opgroup og_sse2_cachect
13556 * @opxcpttype 1
13557 * @optest op1=-1 op2=2 -> op1=2
13558 * @optest op1=0 op2=-42 -> op1=-42
13559 */
13560FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13561{
13562 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13563 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13564 if (IEM_IS_MODRM_MEM_MODE(bRm))
13565 {
13566 /* Register, memory. */
13567 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13568 IEM_MC_LOCAL(RTUINT128U, uSrc);
13569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13570
13571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13573 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13574 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13575
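 /* Non-temporal store; the destination must be 16-byte aligned or the
    _ALIGN_SSE store below raises \#GP(0) (exception type 1 above). */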
13576 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13577 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13578
13579 IEM_MC_ADVANCE_RIP_AND_FINISH();
13580 IEM_MC_END();
13581 }
13582
13583 /**
13584 * @opdone
13585 * @opmnemonic ud660fe7reg
13586 * @opcode 0xe7
13587 * @opcodesub 11 mr/reg
13588 * @oppfx 0x66
13589 * @opunused immediate
13590 * @opcpuid sse2
13591 * @optest ->
13592 */
13593 else
13594 IEMOP_RAISE_INVALID_OPCODE_RET();
13595}
13596
13597/* Opcode 0xf3 0x0f 0xe7 - invalid */
13598/* Opcode 0xf2 0x0f 0xe7 - invalid */
13599
13600
13601/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13602FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13603{
13604 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13605 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
13606}
13607
13608
13609/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13610FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13611{
13612 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13613 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
13614}
13615
13616
13617/* Opcode 0xf3 0x0f 0xe8 - invalid */
13618/* Opcode 0xf2 0x0f 0xe8 - invalid */
13619
13620/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13621FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13622{
13623 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13624 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
13625}
13626
13627
13628/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13629FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13630{
13631 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13632 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
13633}
13634
13635
13636/* Opcode 0xf3 0x0f 0xe9 - invalid */
13637/* Opcode 0xf2 0x0f 0xe9 - invalid */
13638
13639
13640/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13641FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13642{
13643 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13644 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
13645}
13646
13647
13648/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13649FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13650{
13651 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13652 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
13653}
13654
13655
13656/* Opcode 0xf3 0x0f 0xea - invalid */
13657/* Opcode 0xf2 0x0f 0xea - invalid */
13658
13659
13660/** Opcode 0x0f 0xeb - por Pq, Qq */
13661FNIEMOP_DEF(iemOp_por_Pq_Qq)
13662{
13663 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13664 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
13665}
13666
13667
13668/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13669FNIEMOP_DEF(iemOp_por_Vx_Wx)
13670{
13671 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13672 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
13673}
13674
13675
13676/* Opcode 0xf3 0x0f 0xeb - invalid */
13677/* Opcode 0xf2 0x0f 0xeb - invalid */
13678
13679/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13680FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13681{
13682 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13683 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
13684}
13685
13686
13687/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13688FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13689{
13690 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13691 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
13692}
13693
13694
13695/* Opcode 0xf3 0x0f 0xec - invalid */
13696/* Opcode 0xf2 0x0f 0xec - invalid */
13697
13698/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13699FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13700{
13701 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13702 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
13703}
13704
13705
13706/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13707FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13708{
13709 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13710 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
13711}
13712
13713
13714/* Opcode 0xf3 0x0f 0xed - invalid */
13715/* Opcode 0xf2 0x0f 0xed - invalid */
13716
13717
13718/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13719FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13720{
13721 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13722 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13723}
13724
13725
13726/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13727FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13728{
13729 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13730 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13731}
13732
13733
13734/* Opcode 0xf3 0x0f 0xee - invalid */
13735/* Opcode 0xf2 0x0f 0xee - invalid */
13736
13737
13738/** Opcode 0x0f 0xef - pxor Pq, Qq */
13739FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13740{
13741 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13742 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
13743}
13744
13745
13746/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13747FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13748{
13749 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13750 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
13751}
13752
13753
13754/* Opcode 0xf3 0x0f 0xef - invalid */
13755/* Opcode 0xf2 0x0f 0xef - invalid */
13756
13757/* Opcode 0x0f 0xf0 - invalid */
13758/* Opcode 0x66 0x0f 0xf0 - invalid */
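/* Opcode 0xf3 0x0f 0xf0 - invalid */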
13759
13760
13761/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13762FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13763{
13764 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13766 if (IEM_IS_MODRM_REG_MODE(bRm))
13767 {
13768 /*
13769 * Register, register - (not implemented, assuming it raises \#UD).
13770 */
13771 IEMOP_RAISE_INVALID_OPCODE_RET();
13772 }
13773 else
13774 {
13775 /*
13776 * Register, memory.
13777 */
13778 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13779 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13781
13782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
13784 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13785 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
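 /* LDDQU tolerates unaligned operands, so use the plain (unaligned) 128-bit
    fetch rather than the aligned variant movntdq and friends use. */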
13786 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13787 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13788
13789 IEM_MC_ADVANCE_RIP_AND_FINISH();
13790 IEM_MC_END();
13791 }
13792}
13793
13794
13795/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13796FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13797{
13798 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13799 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13800}
13801
13802
13803/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13804FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13805{
13806 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13807 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13808}
13809
13810
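/* Opcode 0xf3 0x0f 0xf1 - invalid */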
13811/* Opcode 0xf2 0x0f 0xf1 - invalid */
13812
13813/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13814FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13815{
13816 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13817 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13818}
13819
13820
13821/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13822FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13823{
13824 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13825 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13826}
13827
13828
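/* Opcode 0xf3 0x0f 0xf2 - invalid */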
13829/* Opcode 0xf2 0x0f 0xf2 - invalid */
13830
13831/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13832FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13833{
13834 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13835 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13836}
13837
13838
13839/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13840FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13841{
13842 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13843 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13844}
13845
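/* Opcode 0xf3 0x0f 0xf3 - invalid */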
13846/* Opcode 0xf2 0x0f 0xf3 - invalid */
13847
13848/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13849FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13850{
13851 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
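 /* Note: even this MMX-register form of PMULUDQ was introduced with SSE2,
    which is why the mnemonic is tagged DISOPTYPE_X86_SSE rather than _MMX. */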
13852 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
13853}
13854
13855
13856/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13857FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13858{
13859 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13860 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13861}
13862
13863
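/* Opcode 0xf3 0x0f 0xf4 - invalid */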
13864/* Opcode 0xf2 0x0f 0xf4 - invalid */
13865
13866/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13867FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13868{
13869 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13870 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13871}
13872
13873
13874/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13875FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13876{
13877 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13878 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13879}
13880
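/* Opcode 0xf3 0x0f 0xf5 - invalid */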
13881/* Opcode 0xf2 0x0f 0xf5 - invalid */
13882
13883/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13884FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13885{
13886 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13887 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13888}
13889
13890
13891/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13892FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13893{
13894 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13895 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13896}
13897
13898
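/* Opcode 0xf3 0x0f 0xf6 - invalid */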
13899/* Opcode 0xf2 0x0f 0xf6 - invalid */
13900
13901/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13902FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13903/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13904FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
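/* Opcode 0xf3 0x0f 0xf7 - invalid */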
13905/* Opcode 0xf2 0x0f 0xf7 - invalid */
13906
13907
13908/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13909FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13910{
13911 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13912 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13913}
13914
13915
13916/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13917FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13918{
13919 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13920 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13921}
13922
13923
13924/* Opcode 0xf2 0x0f 0xf8 - invalid */
13925
13926
13927/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13928FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13929{
13930 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13931 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13932}
13933
13934
13935/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13936FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13937{
13938 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13939 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13940}
13941
13942
13943/* Opcode 0xf2 0x0f 0xf9 - invalid */
13944
13945
13946/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13947FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13948{
13949 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13950 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13951}
13952
13953
13954/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13955FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13956{
13957 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13958 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13959}
13960
13961
13962/* Opcode 0xf2 0x0f 0xfa - invalid */
13963
13964
13965/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13966FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13967{
13968 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
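 /* The 64-bit PSUBQ form likewise arrived with SSE2, hence the _Sse2 worker
    variant (presumably gating on the SSE2 rather than the MMX CPUID flag). */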
13969 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
13970}
13971
13972
13973/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13974FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13975{
13976 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13977 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13978}
13979
13980
13981/* Opcode 0xf2 0x0f 0xfb - invalid */
13982
13983
13984/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13985FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13986{
13987 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13988 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
13989}
13990
13991
13992/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
13993FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13994{
13995 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13996 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
13997}
13998
13999
14000/* Opcode 0xf2 0x0f 0xfc - invalid */
14001
14002
14003/** Opcode 0x0f 0xfd - paddw Pq, Qq */
14004FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
14005{
14006 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14007 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
14008}
14009
14010
14011/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
14012FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
14013{
14014 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14015 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
14016}
14017
14018
14019/* Opcode 0xf2 0x0f 0xfd - invalid */
14020
14021
14022/** Opcode 0x0f 0xfe - paddd Pq, Qq */
14023FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
14024{
14025 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14026 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
14027}
14028
14029
14030/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
14031FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
14032{
14033 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14034 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
14035}
14036
14037
14038/* Opcode 0xf2 0x0f 0xfe - invalid */
14039
14040
14041/** Opcode **** 0x0f 0xff - UD0 */
14042FNIEMOP_DEF(iemOp_ud0)
14043{
14044 IEMOP_MNEMONIC(ud0, "ud0");
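 /* Intel CPUs decode a ModR/M byte (plus any addressing bytes) for UD0
    before raising \#UD, whereas AMD CPUs fault on the two opcode bytes
    alone - hence the vendor check below. */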
14045 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
14046 {
14047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
14048 if (IEM_IS_MODRM_MEM_MODE(bRm))
14049 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
14050 }
14051 IEMOP_HLP_DONE_DECODING();
14052 IEMOP_RAISE_INVALID_OPCODE_RET();
14053}
14054
14055
14056
14057/**
14058 * Two byte opcode map, first byte 0x0f.
14059 *
14060 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
14061 * check if it needs updating as well when making changes.
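 *
 * @remarks Each opcode has four consecutive entries: no prefix, 0x66, 0xF3
 *          and 0xF2, giving 256 x 4 = 1024 entries in total (see the
 *          AssertCompile after the table).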
14062 */
14063const PFNIEMOP g_apfnTwoByteMap[] =
14064{
14065 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
14066 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
14067 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
14068 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
14069 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
14070 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
14071 /* 0x05 */ IEMOP_X4(iemOp_syscall),
14072 /* 0x06 */ IEMOP_X4(iemOp_clts),
14073 /* 0x07 */ IEMOP_X4(iemOp_sysret),
14074 /* 0x08 */ IEMOP_X4(iemOp_invd),
14075 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
14076 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
14077 /* 0x0b */ IEMOP_X4(iemOp_ud2),
14078 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
14079 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
14080 /* 0x0e */ IEMOP_X4(iemOp_femms),
14081 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
14082
14083 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
14084 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
14085 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
14086 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14087 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14088 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14089 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
14090 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14091 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
14092 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
14093 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
14094 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
14095 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
14096 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
14097 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
14098 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
14099
14100 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
14101 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
14102 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
14103 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
14104 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
14105 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14106 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
14107 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14108 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14109 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14110 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
14111 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14112 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
14113 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
14114 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14115 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14116
14117 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
14118 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
14119 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
14120 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
14121 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
14122 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
14123 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
14124 /* 0x37 */ IEMOP_X4(iemOp_getsec),
14125 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
14126 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14127 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
14128 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14129 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14130 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14131 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14132 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14133
14134 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
14135 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
14136 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
14137 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
14138 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
14139 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
14140 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
14141 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
14142 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
14143 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
14144 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
14145 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
14146 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
14147 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
14148 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
14149 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
14150
14151 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14152 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
14153 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
14154 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
14155 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14156 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14157 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14158 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14159 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
14160 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
14161 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
14162 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
14163 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
14164 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
14165 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
14166 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
14167
14168 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14169 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14170 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14171 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14172 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14173 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14174 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14175 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14176 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14177 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14178 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14179 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14180 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14181 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14182 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14183 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
14184
14185 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
14186 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
14187 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
14188 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
14189 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14190 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14191 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14192 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14193
14194 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14195 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14196 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14197 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14198 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
14199 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
14200 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
14201 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
14202
14203 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
14204 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
14205 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
14206 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
14207 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14208 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14209 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14210 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14211 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14212 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14213 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14214 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14215 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14216 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14217 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14218 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14219
14220 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14221 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14222 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14223 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14224 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14225 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14226 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14227 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14228 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14229 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14230 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14231 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14232 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14233 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14234 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14235 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14236
14237 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14238 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14239 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14240 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14241 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14242 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14243 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14244 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14245 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14246 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14247 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14248 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14249 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14250 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14251 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14252 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14253
14254 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14255 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14256 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14257 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14258 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14259 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14260 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14261 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14262 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14263 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14264 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14265 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14266 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14267 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14268 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14269 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14270
14271 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14272 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14273 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14274 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14275 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14276 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14277 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14278 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14279 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14280 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14281 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14282 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14283 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14284 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14285 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14286 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14287
14288 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14289 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14290 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14291 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14292 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14293 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14294 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14295 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14296 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14297 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14298 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14299 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14300 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14301 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14302 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14303 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14304
14305 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14306 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14307 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14308 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14309 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14310 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14311 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14312 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14313 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14314 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14315 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14316 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14317 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14318 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14319 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14320 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14321
14322 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14323 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14324 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14325 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14326 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14327 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14328 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14329 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14330 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14331 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14332 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14333 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14334 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14335 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14336 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14337 /* 0xff */ IEMOP_X4(iemOp_ud0),
14338};
14339AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
14340
14341/** @} */
14342