VirtualBox
source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@103836

Last change on this file since 103836 was 103778, checked in by vboxsync, 9 months ago:

VMM/IEM: Replace IEM_MC_STORE_XREG_HI_U64() with the more versatile IEM_MC_STORE_XREG_U64 and get rid of it, enable IEM_MC_STORE_XREG_U64(), bugref:10614

/* $Id: IEMAllInstTwoByte0f.cpp.h 103778 2024-03-11 16:54:27Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

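/* Illustrative sketch, not part of the original file: an opcode handler binds a
 * concrete assembly worker of type PFNIEMAIMPLMEDIAF2U64 to the common body
 * above.  All names here are hypothetical placeholders in the file's idiom:
 *
 *     FNIEMOP_DEF(iemOp_pxxx_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC2(RM, PXXX, pxxx, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxxx_u64);
 *     }
 */
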
/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

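/* Illustrative sketch (hypothetical wiring, modelled on the file's idiom): the
 * Opt variant above takes a PFNIEMAIMPLMEDIAOPTF2U64 worker that needs no
 * FXSAVE state pointer; an operand-only op like PXOR would fit this shape:
 *
 *     FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pxor_u64);
 *     }
 */
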
/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * that were introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

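/* Illustrative sketch with placeholder names: an XMM opcode handler delegates
 * to the SSE worker above the same way the MMX handlers do, passing a
 * PFNIEMAIMPLMEDIAF2U128 worker (names below are assumptions):
 *
 *     FNIEMOP_DEF(iemOp_pxxx_Vx_Wx)
 *     {
 *         IEMOP_MNEMONIC2(RM, PXXX, pxxx, Vx, Wx, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxxx_u128);
 *     }
 */
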
/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

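/* Illustrative sketch (hypothetical names): the SSE2 integer compares and
 * arithmetic ops funnel through the worker above; a handler would be shaped
 * roughly like this (the exact worker pairing is an assumption):
 *
 *     FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
 *     {
 *         IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
 *     }
 */
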
/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

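/* Illustrative sketch (assumed wiring): the interleave-low family is the
 * natural user of the LowLow worker above, e.g. PUNPCKLBW reading only a
 * dword from memory:
 *
 *     FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
 *     {
 *         IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
 *     }
 */
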
/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

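/* Illustrative sketch (assumed wiring): the interleave-high family pairs with
 * the HighHigh worker above; here the memory operand is a full qword:
 *
 *     FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
 *     }
 */
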
/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where it may read the full 128 bits or only the
 * upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

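/* Illustrative sketch (assumed wiring): packed single-precision arithmetic is
 * the typical client of the SseFp worker above, which routes MXCSR state and
 * results through IEMSSERESULT:
 *
 *     FNIEMOP_DEF(iemOp_addps_Vps_Wps)
 *     {
 *         IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
 *     }
 */
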
/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

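/* Illustrative sketch (assumed wiring): the scalar single-precision forms use
 * the R32 worker above so that only 32 bits are fetched from memory:
 *
 *     FNIEMOP_DEF(iemOp_addss_Vss_Wss)
 *     {
 *         IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
 *     }
 */
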
/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

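/* Illustrative sketch (assumed wiring): scalar double-precision forms use the
 * R64 worker above, fetching a single qword in the memory case:
 *
 *     FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
 *     {
 *         IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
 *     }
 */
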
/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where it may read the full 128 bits or only the
 * upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

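/* Illustrative sketch (assumed wiring): the SSE3 horizontal ops go through the
 * worker above, e.g.:
 *
 *     FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
 *     {
 *         IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
 *     }
 */
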
/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}

/* Need to associate flag info with the blocks, so duplicate the code. */
#define IEMOP_BODY_GRP6_VERX(bRm, fWrite) \
    IEMOP_HLP_MIN_286(); \
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } \
    else \
    { \
        IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0); \
        IEM_MC_ARG(uint16_t, u16Sel, 0); \
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); \
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg); \
        IEM_MC_END(); \
    } (void)0

/**
 * @opmaps      grp6
 * @opcode      /4
 * @opflmodify  zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_BODY_GRP6_VERX(bRm, false);
}


/**
 * @opmaps      grp6
 * @opcode      /5
 * @opflmodify  zf
 */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_BODY_GRP6_VERX(bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}

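/* Worked example (illustrative): for the byte sequence 0f 00 d8, bRm is 0xd8
 * = 11 011 000b, so IEM_GET_MODRM_REG_8(bRm) yields reg = 3 and the table
 * above dispatches to iemOp_Grp6_ltr with the full ModRM byte as argument. */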

/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}

/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a hypercall
       isn't handled by GIM or HMSvm, an #UD is raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}

/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}

/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumption
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a hypercall
       isn't handled by GIM or HMSvm, an #UD is raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif

1684/** Opcode 0x0f 0x01 /4. */
1685FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1686{
1687 IEMOP_MNEMONIC(smsw, "smsw");
1688 IEMOP_HLP_MIN_286();
1689 if (IEM_IS_MODRM_REG_MODE(bRm))
1690 {
1691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1692 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
1693 iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1694 }
1695
1696 /* Ignore operand size here, memory refs are always 16-bit. */
1697 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1698 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1701 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1702 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
1703 IEM_MC_END();
1704}
1705
1706
1707/** Opcode 0x0f 0x01 /6. */
1708FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1709{
1710 /* The operand size is effectively ignored, all is 16-bit and only the
1711 lower 4 bits (PE, MP, EM and TS) are used. */
1712 IEMOP_MNEMONIC(lmsw, "lmsw");
1713 IEMOP_HLP_MIN_286();
1714 if (IEM_IS_MODRM_REG_MODE(bRm))
1715 {
1716 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1718 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1719 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1720 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1721 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
1722 iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1723 IEM_MC_END();
1724 }
1725 else
1726 {
1727 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1728 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1729 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1732 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1733 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0),
1734 iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1735 IEM_MC_END();
1736 }
1737}
1738
1739
1740/** Opcode 0x0f 0x01 /7. */
1741FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1742{
1743 IEMOP_MNEMONIC(invlpg, "invlpg");
1744 IEMOP_HLP_MIN_486();
1745 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
1746 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1749 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
1750 IEM_MC_END();
1751}
1752
1753
1754/** Opcode 0x0f 0x01 0xf8. */
1755FNIEMOP_DEF(iemOp_Grp7_swapgs)
1756{
1757 IEMOP_MNEMONIC(swapgs, "swapgs");
1758 IEMOP_HLP_ONLY_64BIT();
1759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
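/* swapgs exchanges the GS base with MSR IA32_KERNEL_GS_BASE; the GS base is
   thus the only guest register in the flush mask below. */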
1760 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
1761}
1762
1763
1764/** Opcode 0x0f 0x01 0xf9. */
1765FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1766{
1767 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
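/* rdtscp returns the TSC in EDX:EAX and IA32_TSC_AUX in ECX, hence the
   three GPRs in the flush mask below. */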
1769 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
1770 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
1771 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
1772 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
1773 iemCImpl_rdtscp);
1774}
1775
1776
1777/**
1778 * Group 7 jump table, memory variant.
1779 */
1780IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1781{
1782 iemOp_Grp7_sgdt,
1783 iemOp_Grp7_sidt,
1784 iemOp_Grp7_lgdt,
1785 iemOp_Grp7_lidt,
1786 iemOp_Grp7_smsw,
1787 iemOp_InvalidWithRM,
1788 iemOp_Grp7_lmsw,
1789 iemOp_Grp7_invlpg
1790};
1791
1792
1793/** Opcode 0x0f 0x01. */
1794FNIEMOP_DEF(iemOp_Grp7)
1795{
1796 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1797 if (IEM_IS_MODRM_MEM_MODE(bRm))
1798 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1799
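/* With mod=11 (register mode) the instruction is selected by both the reg
   and r/m fields of the ModR/M byte. */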
1800 switch (IEM_GET_MODRM_REG_8(bRm))
1801 {
1802 case 0:
1803 switch (IEM_GET_MODRM_RM_8(bRm))
1804 {
1805 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1806 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1807 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1808 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1809 }
1810 IEMOP_RAISE_INVALID_OPCODE_RET();
1811
1812 case 1:
1813 switch (IEM_GET_MODRM_RM_8(bRm))
1814 {
1815 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1816 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1817 }
1818 IEMOP_RAISE_INVALID_OPCODE_RET();
1819
1820 case 2:
1821 switch (IEM_GET_MODRM_RM_8(bRm))
1822 {
1823 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1824 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1825 }
1826 IEMOP_RAISE_INVALID_OPCODE_RET();
1827
1828 case 3:
1829 switch (IEM_GET_MODRM_RM_8(bRm))
1830 {
1831 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1832 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1833 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1834 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1835 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1836 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1837 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1838 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1840 }
1841
1842 case 4:
1843 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1844
1845 case 5:
1846 IEMOP_RAISE_INVALID_OPCODE_RET();
1847
1848 case 6:
1849 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1850
1851 case 7:
1852 switch (IEM_GET_MODRM_RM_8(bRm))
1853 {
1854 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1855 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1856 }
1857 IEMOP_RAISE_INVALID_OPCODE_RET();
1858
1859 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1860 }
1861}
1862
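/**
 * Common worker for LAR and LSL (lar Gv,Ew / lsl Gv,Ew).
 *
 * @a fIsLar selects the operation: true for LAR, false for LSL.
 */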
1863FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1864{
1865 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1866 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1867
1868 if (IEM_IS_MODRM_REG_MODE(bRm))
1869 {
1870 switch (pVCpu->iem.s.enmEffOpSize)
1871 {
1872 case IEMMODE_16BIT:
1873 IEM_MC_BEGIN(3, 0, 0, 0);
1874 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1875 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1876 IEM_MC_ARG(uint16_t, u16Sel, 1);
1877 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1878
1879 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1880 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1881 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1882 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1883
1884 IEM_MC_END();
1885 break;
1886
1887 case IEMMODE_32BIT:
1888 case IEMMODE_64BIT:
1889 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
1890 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1891 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1892 IEM_MC_ARG(uint16_t, u16Sel, 1);
1893 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1894
1895 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1896 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1897 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1898 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1899
1900 IEM_MC_END();
1901 break;
1902
1903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1904 }
1905 }
1906 else
1907 {
1908 switch (pVCpu->iem.s.enmEffOpSize)
1909 {
1910 case IEMMODE_16BIT:
1911 IEM_MC_BEGIN(3, 1, 0, 0);
1912 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1913 IEM_MC_ARG(uint16_t, u16Sel, 1);
1914 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1915 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1916
1917 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1918 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1919
1920 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1921 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1922 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1923 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1924
1925 IEM_MC_END();
1926 break;
1927
1928 case IEMMODE_32BIT:
1929 case IEMMODE_64BIT:
1930 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
1931 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1932 IEM_MC_ARG(uint16_t, u16Sel, 1);
1933 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1935
1936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1937 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1938/** @todo testcase: make sure it's a 16-bit read. */
1939
1940 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1941 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1942 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1943 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1944
1945 IEM_MC_END();
1946 break;
1947
1948 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1949 }
1950 }
1951}
1952
1953
1954
1955/**
1956 * @opcode 0x02
1957 * @opflmodify zf
1958 */
1959FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1960{
1961 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1962 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1963}
1964
1965
1966/**
1967 * @opcode 0x03
1968 * @opflmodify zf
1969 */
1970FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1971{
1972 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1973 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1974}
1975
1976
1977/** Opcode 0x0f 0x05. */
1978FNIEMOP_DEF(iemOp_syscall)
1979{
1980 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1982 /** @todo r=aeichner Clobbers cr0 only if this is a 286 LOADALL instruction. */
1983 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1984 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1985 RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_syscall);
1986}
1987
1988
1989/** Opcode 0x0f 0x06. */
1990FNIEMOP_DEF(iemOp_clts)
1991{
1992 IEMOP_MNEMONIC(clts, "clts");
1993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1994 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0), iemCImpl_clts);
1995}
1996
1997
1998/** Opcode 0x0f 0x07. */
1999FNIEMOP_DEF(iemOp_sysret)
2000{
2001 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
2002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2003 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
2004 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
2005 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
2006}
2007
2008
2009/** Opcode 0x0f 0x08. */
2010FNIEMOP_DEF(iemOp_invd)
2011{
2012 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
2013 IEMOP_HLP_MIN_486();
2014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2015 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
2016}
2017
2018
2019/** Opcode 0x0f 0x09. */
2020FNIEMOP_DEF(iemOp_wbinvd)
2021{
2022 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
2023 IEMOP_HLP_MIN_486();
2024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2025 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
2026}
2027
2028
2029/** Opcode 0x0f 0x0b. */
2030FNIEMOP_DEF(iemOp_ud2)
2031{
2032 IEMOP_MNEMONIC(ud2, "ud2");
2033 IEMOP_RAISE_INVALID_OPCODE_RET();
2034}
2035
2036/** Opcode 0x0f 0x0d. */
2037FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
2038{
2039 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
2040 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
2041 {
2042 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
2043 IEMOP_RAISE_INVALID_OPCODE_RET();
2044 }
2045
2046 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2047 if (IEM_IS_MODRM_REG_MODE(bRm))
2048 {
2049 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
2050 IEMOP_RAISE_INVALID_OPCODE_RET();
2051 }
2052
2053 switch (IEM_GET_MODRM_REG_8(bRm))
2054 {
2055 case 2: /* Aliased to /0 for the time being. */
2056 case 4: /* Aliased to /0 for the time being. */
2057 case 5: /* Aliased to /0 for the time being. */
2058 case 6: /* Aliased to /0 for the time being. */
2059 case 7: /* Aliased to /0 for the time being. */
2060 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2061 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2062 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2064 }
2065
2066 IEM_MC_BEGIN(0, 1, 0, 0);
2067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2070 /* Currently a NOP. */
2071 IEM_MC_NOREF(GCPtrEffSrc);
2072 IEM_MC_ADVANCE_RIP_AND_FINISH();
2073 IEM_MC_END();
2074}
2075
2076
2077/** Opcode 0x0f 0x0e. */
2078FNIEMOP_DEF(iemOp_femms)
2079{
2080 IEMOP_MNEMONIC(femms, "femms");
2081
2082 IEM_MC_BEGIN(0, 0, 0, 0);
2083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2084 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2085 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2086 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2087 IEM_MC_FPU_FROM_MMX_MODE();
2088 IEM_MC_ADVANCE_RIP_AND_FINISH();
2089 IEM_MC_END();
2090}
2091
2092
2093/** Opcode 0x0f 0x0f. */
2094FNIEMOP_DEF(iemOp_3Dnow)
2095{
2096 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2097 {
2098 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2099 IEMOP_RAISE_INVALID_OPCODE_RET();
2100 }
2101
2102#ifdef IEM_WITH_3DNOW
2103 /* This is pretty sparse, use switch instead of table. */
2104 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2105 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2106#else
2107 IEMOP_BITCH_ABOUT_STUB();
2108 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2109#endif
2110}
2111
2112
2113/**
2114 * @opcode 0x10
2115 * @oppfx none
2116 * @opcpuid sse
2117 * @opgroup og_sse_simdfp_datamove
2118 * @opxcpttype 4UA
2119 * @optest op1=1 op2=2 -> op1=2
2120 * @optest op1=0 op2=-22 -> op1=-22
2121 */
2122FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2123{
2124 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2125 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2126 if (IEM_IS_MODRM_REG_MODE(bRm))
2127 {
2128 /*
2129 * XMM128, XMM128.
2130 */
2131 IEM_MC_BEGIN(0, 0, 0, 0);
2132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2133 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2134 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2135 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2136 IEM_GET_MODRM_RM(pVCpu, bRm));
2137 IEM_MC_ADVANCE_RIP_AND_FINISH();
2138 IEM_MC_END();
2139 }
2140 else
2141 {
2142 /*
2143 * XMM128, [mem128].
2144 */
2145 IEM_MC_BEGIN(0, 2, 0, 0);
2146 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2148
2149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2151 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2152 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2153
2154 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2155 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2156
2157 IEM_MC_ADVANCE_RIP_AND_FINISH();
2158 IEM_MC_END();
2159 }
2160
2161}
2162
2163
2164/**
2165 * @opcode 0x10
2166 * @oppfx 0x66
2167 * @opcpuid sse2
2168 * @opgroup og_sse2_pcksclr_datamove
2169 * @opxcpttype 4UA
2170 * @optest op1=1 op2=2 -> op1=2
2171 * @optest op1=0 op2=-42 -> op1=-42
2172 */
2173FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2174{
2175 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2177 if (IEM_IS_MODRM_REG_MODE(bRm))
2178 {
2179 /*
2180 * XMM128, XMM128.
2181 */
2182 IEM_MC_BEGIN(0, 0, 0, 0);
2183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2184 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2185 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2186 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2187 IEM_GET_MODRM_RM(pVCpu, bRm));
2188 IEM_MC_ADVANCE_RIP_AND_FINISH();
2189 IEM_MC_END();
2190 }
2191 else
2192 {
2193 /*
2194 * XMM128, [mem128].
2195 */
2196 IEM_MC_BEGIN(0, 2, 0, 0);
2197 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2199
2200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2202 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2203 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2204
2205 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2206 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2207
2208 IEM_MC_ADVANCE_RIP_AND_FINISH();
2209 IEM_MC_END();
2210 }
2211}
2212
2213
2214/**
2215 * @opcode 0x10
2216 * @oppfx 0xf3
2217 * @opcpuid sse
2218 * @opgroup og_sse_simdfp_datamove
2219 * @opxcpttype 5
2220 * @optest op1=1 op2=2 -> op1=2
2221 * @optest op1=0 op2=-22 -> op1=-22
2222 */
2223FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2224{
2225 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2227 if (IEM_IS_MODRM_REG_MODE(bRm))
2228 {
2229 /*
2230 * XMM32, XMM32.
2231 */
2232 IEM_MC_BEGIN(0, 1, 0, 0);
2233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2234 IEM_MC_LOCAL(uint32_t, uSrc);
2235
2236 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2237 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
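/* Register form: only the low dword is replaced; bits 127:32 of the
   destination are preserved. */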
2238 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
2239 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2240
2241 IEM_MC_ADVANCE_RIP_AND_FINISH();
2242 IEM_MC_END();
2243 }
2244 else
2245 {
2246 /*
2247 * XMM128, [mem32].
2248 */
2249 IEM_MC_BEGIN(0, 2, 0, 0);
2250 IEM_MC_LOCAL(uint32_t, uSrc);
2251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2252
2253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2255 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2256 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2257
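/* Memory form: the loaded dword is zero extended to 128 bits, unlike the
   register form above. */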
2258 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2259 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2260
2261 IEM_MC_ADVANCE_RIP_AND_FINISH();
2262 IEM_MC_END();
2263 }
2264}
2265
2266
2267/**
2268 * @opcode 0x10
2269 * @oppfx 0xf2
2270 * @opcpuid sse2
2271 * @opgroup og_sse2_pcksclr_datamove
2272 * @opxcpttype 5
2273 * @optest op1=1 op2=2 -> op1=2
2274 * @optest op1=0 op2=-42 -> op1=-42
2275 */
2276FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2277{
2278 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2280 if (IEM_IS_MODRM_REG_MODE(bRm))
2281 {
2282 /*
2283 * XMM64, XMM64.
2284 */
2285 IEM_MC_BEGIN(0, 1, 0, 0);
2286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2287 IEM_MC_LOCAL(uint64_t, uSrc);
2288
2289 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2290 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
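/* Register form: only the low qword is replaced; the destination's high
   qword is preserved. */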
2291 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2292 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2293
2294 IEM_MC_ADVANCE_RIP_AND_FINISH();
2295 IEM_MC_END();
2296 }
2297 else
2298 {
2299 /*
2300 * XMM128, [mem64].
2301 */
2302 IEM_MC_BEGIN(0, 2, 0, 0);
2303 IEM_MC_LOCAL(uint64_t, uSrc);
2304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2305
2306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2307 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2308 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2309 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2310
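/* Memory form: the loaded qword is zero extended to 128 bits. */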
2311 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2312 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2313
2314 IEM_MC_ADVANCE_RIP_AND_FINISH();
2315 IEM_MC_END();
2316 }
2317}
2318
2319
2320/**
2321 * @opcode 0x11
2322 * @oppfx none
2323 * @opcpuid sse
2324 * @opgroup og_sse_simdfp_datamove
2325 * @opxcpttype 4UA
2326 * @optest op1=1 op2=2 -> op1=2
2327 * @optest op1=0 op2=-42 -> op1=-42
2328 */
2329FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2330{
2331 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2332 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2333 if (IEM_IS_MODRM_REG_MODE(bRm))
2334 {
2335 /*
2336 * XMM128, XMM128.
2337 */
2338 IEM_MC_BEGIN(0, 0, 0, 0);
2339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2340 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2341 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2342 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2343 IEM_GET_MODRM_REG(pVCpu, bRm));
2344 IEM_MC_ADVANCE_RIP_AND_FINISH();
2345 IEM_MC_END();
2346 }
2347 else
2348 {
2349 /*
2350 * [mem128], XMM128.
2351 */
2352 IEM_MC_BEGIN(0, 2, 0, 0);
2353 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2355
2356 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2358 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2359 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2360
2361 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2362 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2363
2364 IEM_MC_ADVANCE_RIP_AND_FINISH();
2365 IEM_MC_END();
2366 }
2367}
2368
2369
2370/**
2371 * @opcode 0x11
2372 * @oppfx 0x66
2373 * @opcpuid sse2
2374 * @opgroup og_sse2_pcksclr_datamove
2375 * @opxcpttype 4UA
2376 * @optest op1=1 op2=2 -> op1=2
2377 * @optest op1=0 op2=-42 -> op1=-42
2378 */
2379FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2380{
2381 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2382 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2383 if (IEM_IS_MODRM_REG_MODE(bRm))
2384 {
2385 /*
2386 * XMM128, XMM128.
2387 */
2388 IEM_MC_BEGIN(0, 0, 0, 0);
2389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2390 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2391 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2392 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2393 IEM_GET_MODRM_REG(pVCpu, bRm));
2394 IEM_MC_ADVANCE_RIP_AND_FINISH();
2395 IEM_MC_END();
2396 }
2397 else
2398 {
2399 /*
2400 * [mem128], XMM128.
2401 */
2402 IEM_MC_BEGIN(0, 2, 0, 0);
2403 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2405
2406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2408 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2409 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2410
2411 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2412 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2413
2414 IEM_MC_ADVANCE_RIP_AND_FINISH();
2415 IEM_MC_END();
2416 }
2417}
2418
2419
2420/**
2421 * @opcode 0x11
2422 * @oppfx 0xf3
2423 * @opcpuid sse
2424 * @opgroup og_sse_simdfp_datamove
2425 * @opxcpttype 5
2426 * @optest op1=1 op2=2 -> op1=2
2427 * @optest op1=0 op2=-22 -> op1=-22
2428 */
2429FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2430{
2431 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2433 if (IEM_IS_MODRM_REG_MODE(bRm))
2434 {
2435 /*
2436 * XMM32, XMM32.
2437 */
2438 IEM_MC_BEGIN(0, 1, 0, 0);
2439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2440 IEM_MC_LOCAL(uint32_t, uSrc);
2441
2442 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2443 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2444 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2445 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2446
2447 IEM_MC_ADVANCE_RIP_AND_FINISH();
2448 IEM_MC_END();
2449 }
2450 else
2451 {
2452 /*
2453 * [mem32], XMM32.
2454 */
2455 IEM_MC_BEGIN(0, 2, 0, 0);
2456 IEM_MC_LOCAL(uint32_t, uSrc);
2457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2458
2459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2461 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2462 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2463
2464 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2465 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2466
2467 IEM_MC_ADVANCE_RIP_AND_FINISH();
2468 IEM_MC_END();
2469 }
2470}
2471
2472
2473/**
2474 * @opcode 0x11
2475 * @oppfx 0xf2
2476 * @opcpuid sse2
2477 * @opgroup og_sse2_pcksclr_datamove
2478 * @opxcpttype 5
2479 * @optest op1=1 op2=2 -> op1=2
2480 * @optest op1=0 op2=-42 -> op1=-42
2481 */
2482FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2483{
2484 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2485 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2486 if (IEM_IS_MODRM_REG_MODE(bRm))
2487 {
2488 /*
2489 * XMM64, XMM64.
2490 */
2491 IEM_MC_BEGIN(0, 1, 0, 0);
2492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2493 IEM_MC_LOCAL(uint64_t, uSrc);
2494
2495 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2496 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2497 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2498 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2499
2500 IEM_MC_ADVANCE_RIP_AND_FINISH();
2501 IEM_MC_END();
2502 }
2503 else
2504 {
2505 /*
2506 * [mem64], XMM64.
2507 */
2508 IEM_MC_BEGIN(0, 2, 0, 0);
2509 IEM_MC_LOCAL(uint64_t, uSrc);
2510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2511
2512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2514 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2515 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2516
2517 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2518 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2519
2520 IEM_MC_ADVANCE_RIP_AND_FINISH();
2521 IEM_MC_END();
2522 }
2523}
2524
2525
2526FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2527{
2528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2529 if (IEM_IS_MODRM_REG_MODE(bRm))
2530 {
2531 /**
2532 * @opcode 0x12
2533 * @opcodesub 11 mr/reg
2534 * @oppfx none
2535 * @opcpuid sse
2536 * @opgroup og_sse_simdfp_datamove
2537 * @opxcpttype 5
2538 * @optest op1=1 op2=2 -> op1=2
2539 * @optest op1=0 op2=-42 -> op1=-42
2540 */
2541 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2542
2543 IEM_MC_BEGIN(0, 1, 0, 0);
2544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2545 IEM_MC_LOCAL(uint64_t, uSrc);
2546
2547 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2548 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
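/* movhlps: dst.lo = src.hi; the destination's high qword is left untouched. */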
2549 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2550 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2551
2552 IEM_MC_ADVANCE_RIP_AND_FINISH();
2553 IEM_MC_END();
2554 }
2555 else
2556 {
2557 /**
2558 * @opdone
2559 * @opcode 0x12
2560 * @opcodesub !11 mr/reg
2561 * @oppfx none
2562 * @opcpuid sse
2563 * @opgroup og_sse_simdfp_datamove
2564 * @opxcpttype 5
2565 * @optest op1=1 op2=2 -> op1=2
2566 * @optest op1=0 op2=-42 -> op1=-42
2567 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2568 */
2569 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2570
2571 IEM_MC_BEGIN(0, 2, 0, 0);
2572 IEM_MC_LOCAL(uint64_t, uSrc);
2573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2574
2575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2577 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2578 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2579
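/* movlps: the memory qword replaces only the low qword of the destination;
   the high qword is preserved. */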
2580 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2581 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2582
2583 IEM_MC_ADVANCE_RIP_AND_FINISH();
2584 IEM_MC_END();
2585 }
2586}
2587
2588
2589/**
2590 * @opcode 0x12
2591 * @opcodesub !11 mr/reg
2592 * @oppfx 0x66
2593 * @opcpuid sse2
2594 * @opgroup og_sse2_pcksclr_datamove
2595 * @opxcpttype 5
2596 * @optest op1=1 op2=2 -> op1=2
2597 * @optest op1=0 op2=-42 -> op1=-42
2598 */
2599FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2600{
2601 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2602 if (IEM_IS_MODRM_MEM_MODE(bRm))
2603 {
2604 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2605
2606 IEM_MC_BEGIN(0, 2, 0, 0);
2607 IEM_MC_LOCAL(uint64_t, uSrc);
2608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2609
2610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2612 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2613 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2614
2615 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2616 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2617
2618 IEM_MC_ADVANCE_RIP_AND_FINISH();
2619 IEM_MC_END();
2620 }
2621
2622 /**
2623 * @opdone
2624 * @opmnemonic ud660f12m3
2625 * @opcode 0x12
2626 * @opcodesub 11 mr/reg
2627 * @oppfx 0x66
2628 * @opunused immediate
2629 * @opcpuid sse
2630 * @optest ->
2631 */
2632 else
2633 IEMOP_RAISE_INVALID_OPCODE_RET();
2634}
2635
2636
2637/**
2638 * @opcode 0x12
2639 * @oppfx 0xf3
2640 * @opcpuid sse3
2641 * @opgroup og_sse3_pcksclr_datamove
2642 * @opxcpttype 4
2643 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2644 * op1=0x00000002000000020000000100000001
2645 */
2646FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2647{
2648 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2650 if (IEM_IS_MODRM_REG_MODE(bRm))
2651 {
2652 /*
2653 * XMM, XMM.
2654 */
2655 IEM_MC_BEGIN(0, 1, 0, 0);
2656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2657 IEM_MC_LOCAL(RTUINT128U, uSrc);
2658
2659 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2660 IEM_MC_PREPARE_SSE_USAGE();
2661
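/* movsldup duplicates the even dwords: dst = { src0, src0, src2, src2 }. */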
2662 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2663 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2664 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2665 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2666 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2667
2668 IEM_MC_ADVANCE_RIP_AND_FINISH();
2669 IEM_MC_END();
2670 }
2671 else
2672 {
2673 /*
2674 * XMM, [mem128].
2675 */
2676 IEM_MC_BEGIN(0, 2, 0, 0);
2677 IEM_MC_LOCAL(RTUINT128U, uSrc);
2678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2679
2680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2682 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2683 IEM_MC_PREPARE_SSE_USAGE();
2684
2685 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2686 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2687 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2688 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2689 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2690
2691 IEM_MC_ADVANCE_RIP_AND_FINISH();
2692 IEM_MC_END();
2693 }
2694}
2695
2696
2697/**
2698 * @opcode 0x12
2699 * @oppfx 0xf2
2700 * @opcpuid sse3
2701 * @opgroup og_sse3_pcksclr_datamove
2702 * @opxcpttype 5
2703 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2704 * op1=0x22222222111111112222222211111111
2705 */
2706FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2707{
2708 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2709 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2710 if (IEM_IS_MODRM_REG_MODE(bRm))
2711 {
2712 /*
2713 * XMM128, XMM64.
2714 */
2715 IEM_MC_BEGIN(0, 1, 0, 0);
2716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2717 IEM_MC_LOCAL(uint64_t, uSrc);
2718
2719 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2720 IEM_MC_PREPARE_SSE_USAGE();
2721
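/* movddup broadcasts the low qword: dst = { src.lo, src.lo }. */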
2722 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2723 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2724 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2725
2726 IEM_MC_ADVANCE_RIP_AND_FINISH();
2727 IEM_MC_END();
2728 }
2729 else
2730 {
2731 /*
2732 * XMM128, [mem64].
2733 */
2734 IEM_MC_BEGIN(0, 2, 0, 0);
2735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2736 IEM_MC_LOCAL(uint64_t, uSrc);
2737
2738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2740 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2741 IEM_MC_PREPARE_SSE_USAGE();
2742
2743 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2744 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2745 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
2746
2747 IEM_MC_ADVANCE_RIP_AND_FINISH();
2748 IEM_MC_END();
2749 }
2750}
2751
2752
2753/**
2754 * @opcode 0x13
2755 * @opcodesub !11 mr/reg
2756 * @oppfx none
2757 * @opcpuid sse
2758 * @opgroup og_sse_simdfp_datamove
2759 * @opxcpttype 5
2760 * @optest op1=1 op2=2 -> op1=2
2761 * @optest op1=0 op2=-42 -> op1=-42
2762 */
2763FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2764{
2765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2766 if (IEM_IS_MODRM_MEM_MODE(bRm))
2767 {
2768 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2769
2770 IEM_MC_BEGIN(0, 2, 0, 0);
2771 IEM_MC_LOCAL(uint64_t, uSrc);
2772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2773
2774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2776 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2777 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2778
2779 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2780 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2781
2782 IEM_MC_ADVANCE_RIP_AND_FINISH();
2783 IEM_MC_END();
2784 }
2785
2786 /**
2787 * @opdone
2788 * @opmnemonic ud0f13m3
2789 * @opcode 0x13
2790 * @opcodesub 11 mr/reg
2791 * @oppfx none
2792 * @opunused immediate
2793 * @opcpuid sse
2794 * @optest ->
2795 */
2796 else
2797 IEMOP_RAISE_INVALID_OPCODE_RET();
2798}
2799
2800
2801/**
2802 * @opcode 0x13
2803 * @opcodesub !11 mr/reg
2804 * @oppfx 0x66
2805 * @opcpuid sse2
2806 * @opgroup og_sse2_pcksclr_datamove
2807 * @opxcpttype 5
2808 * @optest op1=1 op2=2 -> op1=2
2809 * @optest op1=0 op2=-42 -> op1=-42
2810 */
2811FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2812{
2813 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2814 if (IEM_IS_MODRM_MEM_MODE(bRm))
2815 {
2816 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2817
2818 IEM_MC_BEGIN(0, 2, 0, 0);
2819 IEM_MC_LOCAL(uint64_t, uSrc);
2820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2821
2822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2824 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2825 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2826
2827 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2828 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2829
2830 IEM_MC_ADVANCE_RIP_AND_FINISH();
2831 IEM_MC_END();
2832 }
2833
2834 /**
2835 * @opdone
2836 * @opmnemonic ud660f13m3
2837 * @opcode 0x13
2838 * @opcodesub 11 mr/reg
2839 * @oppfx 0x66
2840 * @opunused immediate
2841 * @opcpuid sse
2842 * @optest ->
2843 */
2844 else
2845 IEMOP_RAISE_INVALID_OPCODE_RET();
2846}
2847
2848
2849/**
2850 * @opmnemonic udf30f13
2851 * @opcode 0x13
2852 * @oppfx 0xf3
2853 * @opunused intel-modrm
2854 * @opcpuid sse
2855 * @optest ->
2856 * @opdone
2857 */
2858
2859/**
2860 * @opmnemonic udf20f13
2861 * @opcode 0x13
2862 * @oppfx 0xf2
2863 * @opunused intel-modrm
2864 * @opcpuid sse
2865 * @optest ->
2866 * @opdone
2867 */
2868
2869 /** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2870FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2871{
2872 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2873 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2874}
2875
2876
2877/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2878FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2879{
2880 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2881 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2882}
2883
2884
2885/**
2886 * @opdone
2887 * @opmnemonic udf30f14
2888 * @opcode 0x14
2889 * @oppfx 0xf3
2890 * @opunused intel-modrm
2891 * @opcpuid sse
2892 * @optest ->
2893 * @opdone
2894 */
2895
2896/**
2897 * @opmnemonic udf20f14
2898 * @opcode 0x14
2899 * @oppfx 0xf2
2900 * @opunused intel-modrm
2901 * @opcpuid sse
2902 * @optest ->
2903 * @opdone
2904 */
2905
2906/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2907FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2908{
2909 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2910 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2911}
2912
2913
2914/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2915FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2916{
2917 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2918 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2919}
2920
2921
2922/* Opcode 0xf3 0x0f 0x15 - invalid */
2923/* Opcode 0xf2 0x0f 0x15 - invalid */
2924
2925/**
2926 * @opdone
2927 * @opmnemonic udf30f15
2928 * @opcode 0x15
2929 * @oppfx 0xf3
2930 * @opunused intel-modrm
2931 * @opcpuid sse
2932 * @optest ->
2933 * @opdone
2934 */
2935
2936/**
2937 * @opmnemonic udf20f15
2938 * @opcode 0x15
2939 * @oppfx 0xf2
2940 * @opunused intel-modrm
2941 * @opcpuid sse
2942 * @optest ->
2943 * @opdone
2944 */
2945
2946FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2947{
2948 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2949 if (IEM_IS_MODRM_REG_MODE(bRm))
2950 {
2951 /**
2952 * @opcode 0x16
2953 * @opcodesub 11 mr/reg
2954 * @oppfx none
2955 * @opcpuid sse
2956 * @opgroup og_sse_simdfp_datamove
2957 * @opxcpttype 5
2958 * @optest op1=1 op2=2 -> op1=2
2959 * @optest op1=0 op2=-42 -> op1=-42
2960 */
2961 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2962
2963 IEM_MC_BEGIN(0, 1, 0, 0);
2964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2965 IEM_MC_LOCAL(uint64_t, uSrc);
2966
2967 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2968 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
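/* movlhps: dst.hi = src.lo; the destination's low qword is left untouched. */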
2969 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2970 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
2971
2972 IEM_MC_ADVANCE_RIP_AND_FINISH();
2973 IEM_MC_END();
2974 }
2975 else
2976 {
2977 /**
2978 * @opdone
2979 * @opcode 0x16
2980 * @opcodesub !11 mr/reg
2981 * @oppfx none
2982 * @opcpuid sse
2983 * @opgroup og_sse_simdfp_datamove
2984 * @opxcpttype 5
2985 * @optest op1=1 op2=2 -> op1=2
2986 * @optest op1=0 op2=-42 -> op1=-42
2987 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2988 */
2989 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2990
2991 IEM_MC_BEGIN(0, 2, 0, 0);
2992 IEM_MC_LOCAL(uint64_t, uSrc);
2993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2994
2995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2996 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2997 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2998 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2999
3000 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3001 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
3002
3003 IEM_MC_ADVANCE_RIP_AND_FINISH();
3004 IEM_MC_END();
3005 }
3006}
3007
3008
3009/**
3010 * @opcode 0x16
3011 * @opcodesub !11 mr/reg
3012 * @oppfx 0x66
3013 * @opcpuid sse2
3014 * @opgroup og_sse2_pcksclr_datamove
3015 * @opxcpttype 5
3016 * @optest op1=1 op2=2 -> op1=2
3017 * @optest op1=0 op2=-42 -> op1=-42
3018 */
3019FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
3020{
3021 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3022 if (IEM_IS_MODRM_MEM_MODE(bRm))
3023 {
3024 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3025
3026 IEM_MC_BEGIN(0, 2, 0, 0);
3027 IEM_MC_LOCAL(uint64_t, uSrc);
3028 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3029
3030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3032 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3033 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3034
3035 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3036 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
3037
3038 IEM_MC_ADVANCE_RIP_AND_FINISH();
3039 IEM_MC_END();
3040 }
3041
3042 /**
3043 * @opdone
3044 * @opmnemonic ud660f16m3
3045 * @opcode 0x16
3046 * @opcodesub 11 mr/reg
3047 * @oppfx 0x66
3048 * @opunused immediate
3049 * @opcpuid sse
3050 * @optest ->
3051 */
3052 else
3053 IEMOP_RAISE_INVALID_OPCODE_RET();
3054}
3055
3056
3057/**
3058 * @opcode 0x16
3059 * @oppfx 0xf3
3060 * @opcpuid sse3
3061 * @opgroup og_sse3_pcksclr_datamove
3062 * @opxcpttype 4
3063 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3064 * op1=0x00000002000000020000000100000001
3065 */
3066FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3067{
3068 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3069 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3070 if (IEM_IS_MODRM_REG_MODE(bRm))
3071 {
3072 /*
3073 * XMM128, XMM128.
3074 */
3075 IEM_MC_BEGIN(0, 1, 0, 0);
3076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3077 IEM_MC_LOCAL(RTUINT128U, uSrc);
3078
3079 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3080 IEM_MC_PREPARE_SSE_USAGE();
3081
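/* movshdup duplicates the odd dwords: dst = { src1, src1, src3, src3 }. */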
3082 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3083 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3084 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3085 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3086 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3087
3088 IEM_MC_ADVANCE_RIP_AND_FINISH();
3089 IEM_MC_END();
3090 }
3091 else
3092 {
3093 /*
3094 * XMM128, [mem128].
3095 */
3096 IEM_MC_BEGIN(0, 2, 0, 0);
3097 IEM_MC_LOCAL(RTUINT128U, uSrc);
3098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3099
3100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3102 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3103 IEM_MC_PREPARE_SSE_USAGE();
3104
3105 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3106 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3107 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3108 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3109 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3110
3111 IEM_MC_ADVANCE_RIP_AND_FINISH();
3112 IEM_MC_END();
3113 }
3114}
3115
3116/**
3117 * @opdone
3118 * @opmnemonic udf20f16
3119 * @opcode 0x16
3120 * @oppfx 0xf2
3121 * @opunused intel-modrm
3122 * @opcpuid sse
3123 * @optest ->
3124 * @opdone
3125 */
3126
3127
3128/**
3129 * @opcode 0x17
3130 * @opcodesub !11 mr/reg
3131 * @oppfx none
3132 * @opcpuid sse
3133 * @opgroup og_sse_simdfp_datamove
3134 * @opxcpttype 5
3135 * @optest op1=1 op2=2 -> op1=2
3136 * @optest op1=0 op2=-42 -> op1=-42
3137 */
3138FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3139{
3140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3141 if (IEM_IS_MODRM_MEM_MODE(bRm))
3142 {
3143 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3144
3145 IEM_MC_BEGIN(0, 2, 0, 0);
3146 IEM_MC_LOCAL(uint64_t, uSrc);
3147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3148
3149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3151 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3152 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3153
3154 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3155 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3156
3157 IEM_MC_ADVANCE_RIP_AND_FINISH();
3158 IEM_MC_END();
3159 }
3160
3161 /**
3162 * @opdone
3163 * @opmnemonic ud0f17m3
3164 * @opcode 0x17
3165 * @opcodesub 11 mr/reg
3166 * @oppfx none
3167 * @opunused immediate
3168 * @opcpuid sse
3169 * @optest ->
3170 */
3171 else
3172 IEMOP_RAISE_INVALID_OPCODE_RET();
3173}
3174
3175
3176/**
3177 * @opcode 0x17
3178 * @opcodesub !11 mr/reg
3179 * @oppfx 0x66
3180 * @opcpuid sse2
3181 * @opgroup og_sse2_pcksclr_datamove
3182 * @opxcpttype 5
3183 * @optest op1=1 op2=2 -> op1=2
3184 * @optest op1=0 op2=-42 -> op1=-42
3185 */
3186FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3187{
3188 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3189 if (IEM_IS_MODRM_MEM_MODE(bRm))
3190 {
3191 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3192
3193 IEM_MC_BEGIN(0, 2, 0, 0);
3194 IEM_MC_LOCAL(uint64_t, uSrc);
3195 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3196
3197 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3199 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3200 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3201
3202 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3203 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3204
3205 IEM_MC_ADVANCE_RIP_AND_FINISH();
3206 IEM_MC_END();
3207 }
3208
3209 /**
3210 * @opdone
3211 * @opmnemonic ud660f17m3
3212 * @opcode 0x17
3213 * @opcodesub 11 mr/reg
3214 * @oppfx 0x66
3215 * @opunused immediate
3216 * @opcpuid sse
3217 * @optest ->
3218 */
3219 else
3220 IEMOP_RAISE_INVALID_OPCODE_RET();
3221}
3222
3223
3224/**
3225 * @opdone
3226 * @opmnemonic udf30f17
3227 * @opcode 0x17
3228 * @oppfx 0xf3
3229 * @opunused intel-modrm
3230 * @opcpuid sse
3231 * @optest ->
3232 * @opdone
3233 */
3234
3235/**
3236 * @opmnemonic udf20f17
3237 * @opcode 0x17
3238 * @oppfx 0xf2
3239 * @opunused intel-modrm
3240 * @opcpuid sse
3241 * @optest ->
3242 * @opdone
3243 */
3244
3245
3246/** Opcode 0x0f 0x18. */
3247FNIEMOP_DEF(iemOp_prefetch_Grp16)
3248{
3249 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3250 if (IEM_IS_MODRM_MEM_MODE(bRm))
3251 {
3252 switch (IEM_GET_MODRM_REG_8(bRm))
3253 {
3254 case 4: /* Aliased to /0 for the time being according to AMD. */
3255 case 5: /* Aliased to /0 for the time being according to AMD. */
3256 case 6: /* Aliased to /0 for the time being according to AMD. */
3257 case 7: /* Aliased to /0 for the time being according to AMD. */
3258 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3259 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3260 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3261 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3262 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3263 }
3264
3265 IEM_MC_BEGIN(0, 1, 0, 0);
3266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3269 /* Currently a NOP. */
3270 IEM_MC_NOREF(GCPtrEffSrc);
3271 IEM_MC_ADVANCE_RIP_AND_FINISH();
3272 IEM_MC_END();
3273 }
3274 else
3275 IEMOP_RAISE_INVALID_OPCODE_RET();
3276}
3277
3278
3279/** Opcode 0x0f 0x19..0x1f. */
3280FNIEMOP_DEF(iemOp_nop_Ev)
3281{
3282 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3284 if (IEM_IS_MODRM_REG_MODE(bRm))
3285 {
3286 IEM_MC_BEGIN(0, 0, 0, 0);
3287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3288 IEM_MC_ADVANCE_RIP_AND_FINISH();
3289 IEM_MC_END();
3290 }
3291 else
3292 {
3293 IEM_MC_BEGIN(0, 1, 0, 0);
3294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3297 /* Currently a NOP. */
3298 IEM_MC_NOREF(GCPtrEffSrc);
3299 IEM_MC_ADVANCE_RIP_AND_FINISH();
3300 IEM_MC_END();
3301 }
3302}
3303
3304
3305/** Opcode 0x0f 0x20. */
3306FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3307{
3308 /* mod is ignored, as are operand size overrides. */
3309/** @todo testcase: check memory encoding. */
3310 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3311 IEMOP_HLP_MIN_386();
3312 if (IEM_IS_64BIT_CODE(pVCpu))
3313 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3314 else
3315 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
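/* Control register moves are always full width: 64-bit in long mode,
   32-bit otherwise, regardless of any operand size prefix. */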
3316
3317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3318 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3319 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3320 {
3321 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3322 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3323 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3324 iCrReg |= 8;
3325 }
3326 switch (iCrReg)
3327 {
3328 case 0: case 2: case 3: case 4: case 8:
3329 break;
3330 default:
3331 IEMOP_RAISE_INVALID_OPCODE_RET();
3332 }
3333 IEMOP_HLP_DONE_DECODING();
3334
3335 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3336 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3337 iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3338}
3339
3340
3341/** Opcode 0x0f 0x21. */
3342FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3343{
3344/** @todo testcase: check memory encoding. */
3345 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3346 IEMOP_HLP_MIN_386();
3347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3349 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3350 IEMOP_RAISE_INVALID_OPCODE_RET();
3351 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3352 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3353 iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3354}
3355
3356
3357/** Opcode 0x0f 0x22. */
3358FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3359{
3360 /* mod is ignored, as are operand size overrides. */
3361 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3362 IEMOP_HLP_MIN_386();
3363 if (IEM_IS_64BIT_CODE(pVCpu))
3364 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3365 else
3366 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3367
3368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3369 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3370 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3371 {
3372 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3373 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3374 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3375 iCrReg |= 8;
3376 }
3377 switch (iCrReg)
3378 {
3379 case 0: case 2: case 3: case 4: case 8:
3380 break;
3381 default:
3382 IEMOP_RAISE_INVALID_OPCODE_RET();
3383 }
3384 IEMOP_HLP_DONE_DECODING();
3385
3386 /** @todo r=aeichner Split this up as flushing the cr0 is excessive for crX != 0? */
3387 if (iCrReg & (2 | 8))
3388 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
3389 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3390 else
3391 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_Cr0) | RT_BIT_64(kIemNativeGstReg_Cr4),
3392 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3393}
3394
3395
3396/** Opcode 0x0f 0x23. */
3397FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3398{
3399 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3400 IEMOP_HLP_MIN_386();
3401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3403 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3404 IEMOP_RAISE_INVALID_OPCODE_RET();
3405 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3406 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3407}
3408
3409
3410/** Opcode 0x0f 0x24. */
3411FNIEMOP_DEF(iemOp_mov_Rd_Td)
3412{
3413 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3414 IEMOP_HLP_MIN_386();
3415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3417 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3418 IEMOP_RAISE_INVALID_OPCODE_RET();
3419 IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3420 iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3421}
3422
3423
3424/** Opcode 0x0f 0x26. */
3425FNIEMOP_DEF(iemOp_mov_Td_Rd)
3426{
3427 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3428 IEMOP_HLP_MIN_386();
3429 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3431 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3432 IEMOP_RAISE_INVALID_OPCODE_RET();
3433 IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3434}
3435
3436
3437/**
3438 * @opcode 0x28
3439 * @oppfx none
3440 * @opcpuid sse
3441 * @opgroup og_sse_simdfp_datamove
3442 * @opxcpttype 1
3443 * @optest op1=1 op2=2 -> op1=2
3444 * @optest op1=0 op2=-42 -> op1=-42
3445 */
3446FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3447{
3448 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3449 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3450 if (IEM_IS_MODRM_REG_MODE(bRm))
3451 {
3452 /*
3453 * Register, register.
3454 */
3455 IEM_MC_BEGIN(0, 0, 0, 0);
3456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3457 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3458 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3459 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3460 IEM_GET_MODRM_RM(pVCpu, bRm));
3461 IEM_MC_ADVANCE_RIP_AND_FINISH();
3462 IEM_MC_END();
3463 }
3464 else
3465 {
3466 /*
3467 * Register, memory.
3468 */
3469 IEM_MC_BEGIN(0, 2, 0, 0);
3470 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3472
3473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3475 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3476 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3477
3478 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3479 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3480
3481 IEM_MC_ADVANCE_RIP_AND_FINISH();
3482 IEM_MC_END();
3483 }
3484}
3485
3486/**
3487 * @opcode 0x28
3488 * @oppfx 66
3489 * @opcpuid sse2
3490 * @opgroup og_sse2_pcksclr_datamove
3491 * @opxcpttype 1
3492 * @optest op1=1 op2=2 -> op1=2
3493 * @optest op1=0 op2=-42 -> op1=-42
3494 */
3495FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3496{
3497 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3498 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3499 if (IEM_IS_MODRM_REG_MODE(bRm))
3500 {
3501 /*
3502 * Register, register.
3503 */
3504 IEM_MC_BEGIN(0, 0, 0, 0);
3505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3506 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3507 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3508 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3509 IEM_GET_MODRM_RM(pVCpu, bRm));
3510 IEM_MC_ADVANCE_RIP_AND_FINISH();
3511 IEM_MC_END();
3512 }
3513 else
3514 {
3515 /*
3516 * Register, memory.
3517 */
3518 IEM_MC_BEGIN(0, 2, 0, 0);
3519 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3520 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3521
3522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3524 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3525 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3526
3527 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3528 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3529
3530 IEM_MC_ADVANCE_RIP_AND_FINISH();
3531 IEM_MC_END();
3532 }
3533}
3534
3535/* Opcode 0xf3 0x0f 0x28 - invalid */
3536/* Opcode 0xf2 0x0f 0x28 - invalid */
3537
3538/**
3539 * @opcode 0x29
3540 * @oppfx none
3541 * @opcpuid sse
3542 * @opgroup og_sse_simdfp_datamove
3543 * @opxcpttype 1
3544 * @optest op1=1 op2=2 -> op1=2
3545 * @optest op1=0 op2=-42 -> op1=-42
3546 */
3547FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3548{
3549 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3550 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3551 if (IEM_IS_MODRM_REG_MODE(bRm))
3552 {
3553 /*
3554 * Register, register.
3555 */
3556 IEM_MC_BEGIN(0, 0, 0, 0);
3557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3558 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3559 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3560 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3561 IEM_GET_MODRM_REG(pVCpu, bRm));
3562 IEM_MC_ADVANCE_RIP_AND_FINISH();
3563 IEM_MC_END();
3564 }
3565 else
3566 {
3567 /*
3568 * Memory, register.
3569 */
3570 IEM_MC_BEGIN(0, 2, 0, 0);
3571 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3573
3574 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3576 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3577 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3578
3579 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3580 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3581
3582 IEM_MC_ADVANCE_RIP_AND_FINISH();
3583 IEM_MC_END();
3584 }
3585}
3586
3587/**
3588 * @opcode 0x29
3589 * @oppfx 66
3590 * @opcpuid sse2
3591 * @opgroup og_sse2_pcksclr_datamove
3592 * @opxcpttype 1
3593 * @optest op1=1 op2=2 -> op1=2
3594 * @optest op1=0 op2=-42 -> op1=-42
3595 */
3596FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3597{
3598 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3600 if (IEM_IS_MODRM_REG_MODE(bRm))
3601 {
3602 /*
3603 * Register, register.
3604 */
3605 IEM_MC_BEGIN(0, 0, 0, 0);
3606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3607 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3608 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3609 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3610 IEM_GET_MODRM_REG(pVCpu, bRm));
3611 IEM_MC_ADVANCE_RIP_AND_FINISH();
3612 IEM_MC_END();
3613 }
3614 else
3615 {
3616 /*
3617 * Memory, register.
3618 */
3619 IEM_MC_BEGIN(0, 2, 0, 0);
3620 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3622
3623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3625 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3626 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3627
3628 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3629 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3630
3631 IEM_MC_ADVANCE_RIP_AND_FINISH();
3632 IEM_MC_END();
3633 }
3634}
3635
3636/* Opcode 0xf3 0x0f 0x29 - invalid */
3637/* Opcode 0xf2 0x0f 0x29 - invalid */
3638
3639
3640/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3641FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3642{
3643 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3645 if (IEM_IS_MODRM_REG_MODE(bRm))
3646 {
3647 /*
3648 * XMM, MMX
3649 */
3650 IEM_MC_BEGIN(3, 1, 0, 0);
3651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3652 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3653 IEM_MC_LOCAL(X86XMMREG, Dst);
3654 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3655 IEM_MC_ARG(uint64_t, u64Src, 2);
3656 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3657 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3658 IEM_MC_PREPARE_FPU_USAGE();
3659 IEM_MC_FPU_TO_MMX_MODE();
3660
3661 IEM_MC_REF_MXCSR(pfMxcsr);
3662 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3663 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3664
3665 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3666 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3667 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3668 } IEM_MC_ELSE() {
3669 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3670 } IEM_MC_ENDIF();
3671
3672 IEM_MC_ADVANCE_RIP_AND_FINISH();
3673 IEM_MC_END();
3674 }
3675 else
3676 {
3677 /*
3678 * XMM, [mem64]
3679 */
3680 IEM_MC_BEGIN(3, 2, 0, 0);
3681 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3682 IEM_MC_LOCAL(X86XMMREG, Dst);
3683 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3684 IEM_MC_ARG(uint64_t, u64Src, 2);
3685 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3686
3687 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3689 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3690 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3691 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3692
3693 IEM_MC_PREPARE_FPU_USAGE();
3694 IEM_MC_FPU_TO_MMX_MODE();
3695 IEM_MC_REF_MXCSR(pfMxcsr);
 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3696
3697 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3698 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3699 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3700 } IEM_MC_ELSE() {
3701 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3702 } IEM_MC_ENDIF();
3703
3704 IEM_MC_ADVANCE_RIP_AND_FINISH();
3705 IEM_MC_END();
3706 }
3707}
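
/*
 * Guest-visible semantics of cvtpi2ps, roughly (an illustrative sketch only;
 * rounding per MXCSR.RC and #P reporting live in iemAImpl_cvtpi2ps_u128):
 *
 *      pDst->ar32[0] = (float)(int32_t)RT_LO_U32(u64Src);
 *      pDst->ar32[1] = (float)(int32_t)RT_HI_U32(u64Src);
 *      // Lanes 2 and 3 keep their old values, hence the destination
 *      // fetch before the call in both encodings.
 */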
3708
3709
3710/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3711FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3712{
3713 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3715 if (IEM_IS_MODRM_REG_MODE(bRm))
3716 {
3717 /*
3718 * XMM, MMX
3719 */
3720 IEM_MC_BEGIN(3, 1, 0, 0);
3721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3722 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3723 IEM_MC_LOCAL(X86XMMREG, Dst);
3724 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3725 IEM_MC_ARG(uint64_t, u64Src, 2);
3726 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3727 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3728 IEM_MC_PREPARE_FPU_USAGE();
3729 IEM_MC_FPU_TO_MMX_MODE();
3730
3731 IEM_MC_REF_MXCSR(pfMxcsr);
3732 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3733
3734 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3735 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3736 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3737 } IEM_MC_ELSE() {
3738 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3739 } IEM_MC_ENDIF();
3740
3741 IEM_MC_ADVANCE_RIP_AND_FINISH();
3742 IEM_MC_END();
3743 }
3744 else
3745 {
3746 /*
3747 * XMM, [mem64]
3748 */
3749 IEM_MC_BEGIN(3, 3, 0, 0);
3750 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3751 IEM_MC_LOCAL(X86XMMREG, Dst);
3752 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3753 IEM_MC_ARG(uint64_t, u64Src, 2);
3754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3755
3756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3758 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3759 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3760 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3761
3762 /* Doesn't cause a transition to MMX mode. */
3763 IEM_MC_PREPARE_SSE_USAGE();
3764 IEM_MC_REF_MXCSR(pfMxcsr);
3765
3766 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3767 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3768 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3769 } IEM_MC_ELSE() {
3770 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3771 } IEM_MC_ENDIF();
3772
3773 IEM_MC_ADVANCE_RIP_AND_FINISH();
3774 IEM_MC_END();
3775 }
3776}
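
/*
 * Unlike cvtpi2ps above, cvtpi2pd writes all 128 bits of the destination
 * (two doubles), so no prior fetch of the destination register is needed.
 * Roughly (illustrative):
 *
 *      pDst->ar64[0] = (double)(int32_t)RT_LO_U32(u64Src);
 *      pDst->ar64[1] = (double)(int32_t)RT_HI_U32(u64Src);
 *
 * int32 -> double is always exact, so the conversion itself cannot raise
 * precision exceptions.
 */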
3777
3778
3779/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3780FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3781{
3782 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3783
3784 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3785 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3786 {
3787 if (IEM_IS_MODRM_REG_MODE(bRm))
3788 {
3789 /* XMM, greg64 */
3790 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3791 IEM_MC_LOCAL(uint32_t, fMxcsr);
3792 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3793 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3794 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3795 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3796
3797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3798 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3799 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3800
3801 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3802 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3803 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3804 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3805 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3806 } IEM_MC_ELSE() {
3807 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3808 } IEM_MC_ENDIF();
3809
3810 IEM_MC_ADVANCE_RIP_AND_FINISH();
3811 IEM_MC_END();
3812 }
3813 else
3814 {
3815 /* XMM, [mem64] */
3816 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
3817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3818 IEM_MC_LOCAL(uint32_t, fMxcsr);
3819 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3820 IEM_MC_LOCAL(int64_t, i64Src);
3821 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3822 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3823 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3824
3825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3827 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3828 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3829
3830 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3831 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3832 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3833 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3834 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3835 } IEM_MC_ELSE() {
3836 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3837 } IEM_MC_ENDIF();
3838
3839 IEM_MC_ADVANCE_RIP_AND_FINISH();
3840 IEM_MC_END();
3841 }
3842 }
3843 else
3844 {
3845 if (IEM_IS_MODRM_REG_MODE(bRm))
3846 {
3847 /* XMM, greg32 */
3848 IEM_MC_BEGIN(3, 2, 0, 0);
3849 IEM_MC_LOCAL(uint32_t, fMxcsr);
3850 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3851 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3852 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3853 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3854
3855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3856 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3857 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3858
3859 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3860 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3861 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3862 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3863 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3864 } IEM_MC_ELSE() {
3865 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3866 } IEM_MC_ENDIF();
3867
3868 IEM_MC_ADVANCE_RIP_AND_FINISH();
3869 IEM_MC_END();
3870 }
3871 else
3872 {
3873 /* XMM, [mem32] */
3874 IEM_MC_BEGIN(3, 4, 0, 0);
3875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3876 IEM_MC_LOCAL(uint32_t, fMxcsr);
3877 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3878 IEM_MC_LOCAL(int32_t, i32Src);
3879 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3880 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3881 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3882
3883 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3885 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3886 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3887
3888 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3889 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3890 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3891 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3892 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3893 } IEM_MC_ELSE() {
3894 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3895 } IEM_MC_ENDIF();
3896
3897 IEM_MC_ADVANCE_RIP_AND_FINISH();
3898 IEM_MC_END();
3899 }
3900 }
3901}
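
/*
 * The REX.W split above only selects the source width; the result always
 * lands in the low dword of the XMM register and the rest is preserved
 * (see IEM_MC_STORE_XREG_R32). As a sketch:
 *
 *      xmm.ar32[0] = fRexW ? (float)(int64_t)uSrc : (float)(int32_t)uSrc;
 *
 * Both conversions can be inexact, so MXCSR.RC selects the rounding and #P
 * may be signalled by the iemAImpl_cvtsi2ss_r32_i64/_i32 workers.
 */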
3902
3903
3904/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3905FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3906{
3907 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3908
3909 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3910 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3911 {
3912 if (IEM_IS_MODRM_REG_MODE(bRm))
3913 {
3914 /* XMM, greg64 */
3915 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3916 IEM_MC_LOCAL(uint32_t, fMxcsr);
3917 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3918 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3919 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3920 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3921
3922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3923 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3924 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3925
3926 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3927 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3928 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3929 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3930 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3931 } IEM_MC_ELSE() {
3932 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3933 } IEM_MC_ENDIF();
3934
3935 IEM_MC_ADVANCE_RIP_AND_FINISH();
3936 IEM_MC_END();
3937 }
3938 else
3939 {
3940 /* XMM, [mem64] */
3941 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
3942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3943 IEM_MC_LOCAL(uint32_t, fMxcsr);
3944 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3945 IEM_MC_LOCAL(int64_t, i64Src);
3946 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3947 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3948 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3949
3950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3952 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3953 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3954
3955 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3956 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3957 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3958 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3959 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3960 } IEM_MC_ELSE() {
3961 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3962 } IEM_MC_ENDIF();
3963
3964 IEM_MC_ADVANCE_RIP_AND_FINISH();
3965 IEM_MC_END();
3966 }
3967 }
3968 else
3969 {
3970 if (IEM_IS_MODRM_REG_MODE(bRm))
3971 {
3972 /* XMM, greg32 */
3973 IEM_MC_BEGIN(3, 2, 0, 0);
3974 IEM_MC_LOCAL(uint32_t, fMxcsr);
3975 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3976 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3977 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3978 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3979
3980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3981 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3982 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3983
3984 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3985 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3986 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3987 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3988 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3989 } IEM_MC_ELSE() {
3990 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3991 } IEM_MC_ENDIF();
3992
3993 IEM_MC_ADVANCE_RIP_AND_FINISH();
3994 IEM_MC_END();
3995 }
3996 else
3997 {
3998 /* XMM, [mem32] */
3999 IEM_MC_BEGIN(3, 4, 0, 0);
4000 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4001 IEM_MC_LOCAL(uint32_t, fMxcsr);
4002 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
4003 IEM_MC_LOCAL(int32_t, i32Src);
4004 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4005 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
4006 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
4007
4008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4010 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4011 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4012
4013 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4014 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
4015 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4016 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4017 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4018 } IEM_MC_ELSE() {
4019 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
4020 } IEM_MC_ENDIF();
4021
4022 IEM_MC_ADVANCE_RIP_AND_FINISH();
4023 IEM_MC_END();
4024 }
4025 }
4026}
4027
4028
4029/**
4030 * @opcode 0x2b
4031 * @opcodesub !11 mr/reg
4032 * @oppfx none
4033 * @opcpuid sse
4034 * @opgroup og_sse1_cachect
4035 * @opxcpttype 1
4036 * @optest op1=1 op2=2 -> op1=2
4037 * @optest op1=0 op2=-42 -> op1=-42
4038 */
4039FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4040{
4041 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4042 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4043 if (IEM_IS_MODRM_MEM_MODE(bRm))
4044 {
4045 /*
4046 * Memory, register.
4047 */
4048 IEM_MC_BEGIN(0, 2, 0, 0);
4049 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4051
4052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4054 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4055 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4056
4057 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4058 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4059
4060 IEM_MC_ADVANCE_RIP_AND_FINISH();
4061 IEM_MC_END();
4062 }
4063 /* The register, register encoding is invalid. */
4064 else
4065 IEMOP_RAISE_INVALID_OPCODE_RET();
4066}
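
/*
 * The non-temporal hint is not modelled: IEM simply performs a normal
 * aligned 128-bit store, which is architecturally fine as the hint only
 * affects caching, never the stored value. Typical guest usage, for
 * illustration:
 *
 *      movntps [rdi], xmm0     ; streaming store; the register form is #UD
 */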
4067
4068/**
4069 * @opcode 0x2b
4070 * @opcodesub !11 mr/reg
4071 * @oppfx 0x66
4072 * @opcpuid sse2
4073 * @opgroup og_sse2_cachect
4074 * @opxcpttype 1
4075 * @optest op1=1 op2=2 -> op1=2
4076 * @optest op1=0 op2=-42 -> op1=-42
4077 */
4078FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4079{
4080 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4082 if (IEM_IS_MODRM_MEM_MODE(bRm))
4083 {
4084 /*
4085 * Memory, register.
4086 */
4087 IEM_MC_BEGIN(0, 2, 0, 0);
4088 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4090
4091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4093 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4094 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4095
4096 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4097 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4098
4099 IEM_MC_ADVANCE_RIP_AND_FINISH();
4100 IEM_MC_END();
4101 }
4102 /* The register, register encoding is invalid. */
4103 else
4104 IEMOP_RAISE_INVALID_OPCODE_RET();
4105}

4106/* Opcode 0xf3 0x0f 0x2b - invalid */
4107/* Opcode 0xf2 0x0f 0x2b - invalid */
4108
4109
4110/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4111FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4112{
4113 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4114 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4115 if (IEM_IS_MODRM_REG_MODE(bRm))
4116 {
4117 /*
4118 * Register, register.
4119 */
4120 IEM_MC_BEGIN(3, 1, 0, 0);
4121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4122 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4123 IEM_MC_LOCAL(uint64_t, u64Dst);
4124 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4125 IEM_MC_ARG(uint64_t, u64Src, 2);
4126 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4127 IEM_MC_PREPARE_FPU_USAGE();
4128 IEM_MC_FPU_TO_MMX_MODE();
4129
4130 IEM_MC_REF_MXCSR(pfMxcsr);
4131 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4132
4133 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4134 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4135 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4136 } IEM_MC_ELSE() {
4137 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4138 } IEM_MC_ENDIF();
4139
4140 IEM_MC_ADVANCE_RIP_AND_FINISH();
4141 IEM_MC_END();
4142 }
4143 else
4144 {
4145 /*
4146 * Register, memory.
4147 */
4148 IEM_MC_BEGIN(3, 2, 0, 0);
4149 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4150 IEM_MC_LOCAL(uint64_t, u64Dst);
4151 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4152 IEM_MC_ARG(uint64_t, u64Src, 2);
4153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4154
4155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4157 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4158 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4159
4160 IEM_MC_PREPARE_FPU_USAGE();
4161 IEM_MC_FPU_TO_MMX_MODE();
4162 IEM_MC_REF_MXCSR(pfMxcsr);
4163
4164 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4165 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4166 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4167 } IEM_MC_ELSE() {
4168 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4169 } IEM_MC_ENDIF();
4170
4171 IEM_MC_ADVANCE_RIP_AND_FINISH();
4172 IEM_MC_END();
4173 }
4174}
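
/*
 * The 'tt' variants truncate (round towards zero) regardless of MXCSR.RC,
 * and out-of-range or NaN inputs produce the integer indefinite value while
 * flagging an invalid operation. Per lane, roughly (illustrative sketch):
 *
 *      float const r32 = ...;              // source lane
 *      int32_t     i32;
 *      if (r32 >= -2147483648.0f && r32 < 2147483648.0f)   // NaN fails too
 *          i32 = (int32_t)r32;             // the C cast truncates
 *      else
 *          i32 = INT32_MIN;                // 0x80000000; MXCSR.IE := 1
 */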
4175
4176
4177/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4178FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4179{
4180 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4182 if (IEM_IS_MODRM_REG_MODE(bRm))
4183 {
4184 /*
4185 * Register, register.
4186 */
4187 IEM_MC_BEGIN(3, 1, 0, 0);
4188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4189 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4190 IEM_MC_LOCAL(uint64_t, u64Dst);
4191 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4192 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4193 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4194 IEM_MC_PREPARE_FPU_USAGE();
4195 IEM_MC_FPU_TO_MMX_MODE();
4196
4197 IEM_MC_REF_MXCSR(pfMxcsr);
4198 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4199
4200 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4201 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4202 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4203 } IEM_MC_ELSE() {
4204 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4205 } IEM_MC_ENDIF();
4206
4207 IEM_MC_ADVANCE_RIP_AND_FINISH();
4208 IEM_MC_END();
4209 }
4210 else
4211 {
4212 /*
4213 * Register, memory.
4214 */
4215 IEM_MC_BEGIN(3, 3, 0, 0);
4216 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4217 IEM_MC_LOCAL(uint64_t, u64Dst);
4218 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4219 IEM_MC_LOCAL(X86XMMREG, uSrc);
4220 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4222
4223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4225 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4226 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4227
4228 IEM_MC_PREPARE_FPU_USAGE();
4229 IEM_MC_FPU_TO_MMX_MODE();
4230
4231 IEM_MC_REF_MXCSR(pfMxcsr);
4232
4233 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4234 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4235 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4236 } IEM_MC_ELSE() {
4237 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4238 } IEM_MC_ENDIF();
4239
4240 IEM_MC_ADVANCE_RIP_AND_FINISH();
4241 IEM_MC_END();
4242 }
4243}
4244
4245
4246/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4247FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4248{
4249 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4250
4251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4252 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4253 {
4254 if (IEM_IS_MODRM_REG_MODE(bRm))
4255 {
4256 /* greg64, XMM */
4257 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4258 IEM_MC_LOCAL(uint32_t, fMxcsr);
4259 IEM_MC_LOCAL(int64_t, i64Dst);
4260 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4261 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4262 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4263
4264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4265 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4266 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4267
4268 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4269 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4270 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4271 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4272 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4273 } IEM_MC_ELSE() {
4274 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4275 } IEM_MC_ENDIF();
4276
4277 IEM_MC_ADVANCE_RIP_AND_FINISH();
4278 IEM_MC_END();
4279 }
4280 else
4281 {
4282 /* greg64, [mem32] */
4283 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4285 IEM_MC_LOCAL(uint32_t, fMxcsr);
4286 IEM_MC_LOCAL(int64_t, i64Dst);
4287 IEM_MC_LOCAL(uint32_t, u32Src);
4288 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4289 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4290 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4291
4292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4294 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4295 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4296
4297 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4298 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4299 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4300 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4301 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4302 } IEM_MC_ELSE() {
4303 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4304 } IEM_MC_ENDIF();
4305
4306 IEM_MC_ADVANCE_RIP_AND_FINISH();
4307 IEM_MC_END();
4308 }
4309 }
4310 else
4311 {
4312 if (IEM_IS_MODRM_REG_MODE(bRm))
4313 {
4314 /* greg32, XMM */
4315 IEM_MC_BEGIN(3, 2, 0, 0);
4316 IEM_MC_LOCAL(uint32_t, fMxcsr);
4317 IEM_MC_LOCAL(int32_t, i32Dst);
4318 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4319 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4320 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4321
4322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4323 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4324 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4325
4326 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4327 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4328 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4329 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4330 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4331 } IEM_MC_ELSE() {
4332 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4333 } IEM_MC_ENDIF();
4334
4335 IEM_MC_ADVANCE_RIP_AND_FINISH();
4336 IEM_MC_END();
4337 }
4338 else
4339 {
4340 /* greg32, [mem32] */
4341 IEM_MC_BEGIN(3, 4, 0, 0);
4342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4343 IEM_MC_LOCAL(uint32_t, fMxcsr);
4344 IEM_MC_LOCAL(int32_t, i32Dst);
4345 IEM_MC_LOCAL(uint32_t, u32Src);
4346 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4347 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4348 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4349
4350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4352 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4353 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4354
4355 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4356 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4357 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4358 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4359 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4360 } IEM_MC_ELSE() {
4361 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4362 } IEM_MC_ENDIF();
4363
4364 IEM_MC_ADVANCE_RIP_AND_FINISH();
4365 IEM_MC_END();
4366 }
4367 }
4368}
4369
4370
4371/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4372FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4373{
4374 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4375
4376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4377 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4378 {
4379 if (IEM_IS_MODRM_REG_MODE(bRm))
4380 {
4381 /* greg64, XMM */
4382 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4383 IEM_MC_LOCAL(uint32_t, fMxcsr);
4384 IEM_MC_LOCAL(int64_t, i64Dst);
4385 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4386 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4387 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4388
4389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4390 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4391 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4392
4393 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4394 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4395 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4396 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4397 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4398 } IEM_MC_ELSE() {
4399 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4400 } IEM_MC_ENDIF();
4401
4402 IEM_MC_ADVANCE_RIP_AND_FINISH();
4403 IEM_MC_END();
4404 }
4405 else
4406 {
4407 /* greg64, [mem64] */
4408 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4410 IEM_MC_LOCAL(uint32_t, fMxcsr);
4411 IEM_MC_LOCAL(int64_t, i64Dst);
4412 IEM_MC_LOCAL(uint64_t, u64Src);
4413 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4414 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4415 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4416
4417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4419 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4420 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4421
4422 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4423 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4424 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4425 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4426 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4427 } IEM_MC_ELSE() {
4428 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4429 } IEM_MC_ENDIF();
4430
4431 IEM_MC_ADVANCE_RIP_AND_FINISH();
4432 IEM_MC_END();
4433 }
4434 }
4435 else
4436 {
4437 if (IEM_IS_MODRM_REG_MODE(bRm))
4438 {
4439 /* greg32, XMM */
4440 IEM_MC_BEGIN(3, 2, 0, 0);
4441 IEM_MC_LOCAL(uint32_t, fMxcsr);
4442 IEM_MC_LOCAL(int32_t, i32Dst);
4443 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4444 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4445 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4446
4447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4448 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4449 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4450
4451 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4452 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4453 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4454 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4455 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4456 } IEM_MC_ELSE() {
4457 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4458 } IEM_MC_ENDIF();
4459
4460 IEM_MC_ADVANCE_RIP_AND_FINISH();
4461 IEM_MC_END();
4462 }
4463 else
4464 {
4465 /* greg32, [mem32] */
4466 IEM_MC_BEGIN(3, 4, 0, 0);
4467 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4468 IEM_MC_LOCAL(uint32_t, fMxcsr);
4469 IEM_MC_LOCAL(int32_t, i32Dst);
4470 IEM_MC_LOCAL(uint64_t, u64Src);
4471 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4472 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4473 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4474
4475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4477 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4478 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4479
4480 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4481 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4482 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4483 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4484 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4485 } IEM_MC_ELSE() {
4486 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4487 } IEM_MC_ENDIF();
4488
4489 IEM_MC_ADVANCE_RIP_AND_FINISH();
4490 IEM_MC_END();
4491 }
4492 }
4493}
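
/*
 * Summary of the 0x2c vs. 0x2d encodings: the cvtt* group above always
 * truncates, while the cvt* group below rounds according to MXCSR.RC.
 * In C terms, roughly:
 *
 *      cvttsd2si:  i64 = (int64_t)trunc(rd);   // always towards zero
 *      cvtsd2si:   i64 = (int64_t)rint(rd);    // current rounding mode
 */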
4494
4495
4496/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4497FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4498{
4499 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4501 if (IEM_IS_MODRM_REG_MODE(bRm))
4502 {
4503 /*
4504 * Register, register.
4505 */
4506 IEM_MC_BEGIN(3, 1, 0, 0);
4507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4508 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4509 IEM_MC_LOCAL(uint64_t, u64Dst);
4510 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4511 IEM_MC_ARG(uint64_t, u64Src, 2);
4512
4513 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4514 IEM_MC_PREPARE_FPU_USAGE();
4515 IEM_MC_FPU_TO_MMX_MODE();
4516
4517 IEM_MC_REF_MXCSR(pfMxcsr);
4518 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4519
4520 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4521 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4522 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4523 } IEM_MC_ELSE() {
4524 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4525 } IEM_MC_ENDIF();
4526
4527 IEM_MC_ADVANCE_RIP_AND_FINISH();
4528 IEM_MC_END();
4529 }
4530 else
4531 {
4532 /*
4533 * Register, memory.
4534 */
4535 IEM_MC_BEGIN(3, 2, 0, 0);
4536 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4537 IEM_MC_LOCAL(uint64_t, u64Dst);
4538 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4539 IEM_MC_ARG(uint64_t, u64Src, 2);
4540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4541
4542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4544 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4545 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4546
4547 IEM_MC_PREPARE_FPU_USAGE();
4548 IEM_MC_FPU_TO_MMX_MODE();
4549 IEM_MC_REF_MXCSR(pfMxcsr);
4550
4551 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4552 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4553 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4554 } IEM_MC_ELSE() {
4555 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4556 } IEM_MC_ENDIF();
4557
4558 IEM_MC_ADVANCE_RIP_AND_FINISH();
4559 IEM_MC_END();
4560 }
4561}
4562
4563
4564/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4565FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4566{
4567 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4568 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4569 if (IEM_IS_MODRM_REG_MODE(bRm))
4570 {
4571 /*
4572 * Register, register.
4573 */
4574 IEM_MC_BEGIN(3, 1, 0, 0);
4575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4576 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4577 IEM_MC_LOCAL(uint64_t, u64Dst);
4578 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4579 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4580
4581 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4582 IEM_MC_PREPARE_FPU_USAGE();
4583 IEM_MC_FPU_TO_MMX_MODE();
4584
4585 IEM_MC_REF_MXCSR(pfMxcsr);
4586 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4587
4588 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4589 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4590 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4591 } IEM_MC_ELSE() {
4592 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4593 } IEM_MC_ENDIF();
4594
4595 IEM_MC_ADVANCE_RIP_AND_FINISH();
4596 IEM_MC_END();
4597 }
4598 else
4599 {
4600 /*
4601 * Register, memory.
4602 */
4603 IEM_MC_BEGIN(3, 3, 0, 0);
4604 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4605 IEM_MC_LOCAL(uint64_t, u64Dst);
4606 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4607 IEM_MC_LOCAL(X86XMMREG, uSrc);
4608 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4610
4611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4613 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4614 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4615
4616 IEM_MC_PREPARE_FPU_USAGE();
4617 IEM_MC_FPU_TO_MMX_MODE();
4618
4619 IEM_MC_REF_MXCSR(pfMxcsr);
4620
4621 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4622 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4623 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4624 } IEM_MC_ELSE() {
4625 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4626 } IEM_MC_ENDIF();
4627
4628 IEM_MC_ADVANCE_RIP_AND_FINISH();
4629 IEM_MC_END();
4630 }
4631}
4632
4633
4634/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4635FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4636{
4637 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4638
4639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4640 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4641 {
4642 if (IEM_IS_MODRM_REG_MODE(bRm))
4643 {
4644 /* greg64, XMM */
4645 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4646 IEM_MC_LOCAL(uint32_t, fMxcsr);
4647 IEM_MC_LOCAL(int64_t, i64Dst);
4648 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4649 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4650 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4651
4652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4653 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4654 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4655
4656 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4657 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4658 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4659 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4660 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4661 } IEM_MC_ELSE() {
4662 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4663 } IEM_MC_ENDIF();
4664
4665 IEM_MC_ADVANCE_RIP_AND_FINISH();
4666 IEM_MC_END();
4667 }
4668 else
4669 {
4670 /* greg64, [mem32] */
4671 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4673 IEM_MC_LOCAL(uint32_t, fMxcsr);
4674 IEM_MC_LOCAL(int64_t, i64Dst);
4675 IEM_MC_LOCAL(uint32_t, u32Src);
4676 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4677 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4678 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4679
4680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4682 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4683 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4684
4685 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4686 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4687 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4688 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4689 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4690 } IEM_MC_ELSE() {
4691 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4692 } IEM_MC_ENDIF();
4693
4694 IEM_MC_ADVANCE_RIP_AND_FINISH();
4695 IEM_MC_END();
4696 }
4697 }
4698 else
4699 {
4700 if (IEM_IS_MODRM_REG_MODE(bRm))
4701 {
4702 /* greg32, XMM */
4703 IEM_MC_BEGIN(3, 2, 0, 0);
4704 IEM_MC_LOCAL(uint32_t, fMxcsr);
4705 IEM_MC_LOCAL(int32_t, i32Dst);
4706 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4707 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4708 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4709
4710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4711 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4712 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4713
4714 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4715 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4716 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4717 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4718 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4719 } IEM_MC_ELSE() {
4720 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4721 } IEM_MC_ENDIF();
4722
4723 IEM_MC_ADVANCE_RIP_AND_FINISH();
4724 IEM_MC_END();
4725 }
4726 else
4727 {
4728 /* greg32, [mem32] */
4729 IEM_MC_BEGIN(3, 4, 0, 0);
4730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4731 IEM_MC_LOCAL(uint32_t, fMxcsr);
4732 IEM_MC_LOCAL(int32_t, i32Dst);
4733 IEM_MC_LOCAL(uint32_t, u32Src);
4734 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4735 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4736 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4737
4738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4740 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4741 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4742
4743 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4744 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4745 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4746 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4747 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4748 } IEM_MC_ELSE() {
4749 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4750 } IEM_MC_ENDIF();
4751
4752 IEM_MC_ADVANCE_RIP_AND_FINISH();
4753 IEM_MC_END();
4754 }
4755 }
4756}
4757
4758
4759/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4760FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4761{
4762 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4763
4764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4765 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4766 {
4767 if (IEM_IS_MODRM_REG_MODE(bRm))
4768 {
4769 /* greg64, XMM */
4770 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4771 IEM_MC_LOCAL(uint32_t, fMxcsr);
4772 IEM_MC_LOCAL(int64_t, i64Dst);
4773 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4774 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4775 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4776
4777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4778 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4779 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4780
4781 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4782 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4783 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4784 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4785 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4786 } IEM_MC_ELSE() {
4787 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4788 } IEM_MC_ENDIF();
4789
4790 IEM_MC_ADVANCE_RIP_AND_FINISH();
4791 IEM_MC_END();
4792 }
4793 else
4794 {
4795 /* greg64, [mem64] */
4796 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4798 IEM_MC_LOCAL(uint32_t, fMxcsr);
4799 IEM_MC_LOCAL(int64_t, i64Dst);
4800 IEM_MC_LOCAL(uint64_t, u64Src);
4801 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4802 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4803 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4804
4805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4807 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4808 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4809
4810 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4811 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4812 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4813 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4814 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4815 } IEM_MC_ELSE() {
4816 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4817 } IEM_MC_ENDIF();
4818
4819 IEM_MC_ADVANCE_RIP_AND_FINISH();
4820 IEM_MC_END();
4821 }
4822 }
4823 else
4824 {
4825 if (IEM_IS_MODRM_REG_MODE(bRm))
4826 {
4827 /* greg32, XMM */
4828 IEM_MC_BEGIN(3, 2, 0, 0);
4829 IEM_MC_LOCAL(uint32_t, fMxcsr);
4830 IEM_MC_LOCAL(int32_t, i32Dst);
4831 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4832 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4833 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4834
4835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4836 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4837 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4838
4839 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4840 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4841 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4842 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4843 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4844 } IEM_MC_ELSE() {
4845 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4846 } IEM_MC_ENDIF();
4847
4848 IEM_MC_ADVANCE_RIP_AND_FINISH();
4849 IEM_MC_END();
4850 }
4851 else
4852 {
4853 /* greg32, [mem64] */
4854 IEM_MC_BEGIN(3, 4, 0, 0);
4855 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4856 IEM_MC_LOCAL(uint32_t, fMxcsr);
4857 IEM_MC_LOCAL(int32_t, i32Dst);
4858 IEM_MC_LOCAL(uint64_t, u64Src);
4859 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4860 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4861 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4862
4863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4865 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4866 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4867
4868 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4869 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4870 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4871 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4872 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4873 } IEM_MC_ELSE() {
4874 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4875 } IEM_MC_ENDIF();
4876
4877 IEM_MC_ADVANCE_RIP_AND_FINISH();
4878 IEM_MC_END();
4879 }
4880 }
4881}
4882
4883
4884/**
4885 * @opcode 0x2e
4886 * @oppfx none
4887 * @opflmodify cf,pf,af,zf,sf,of
4888 * @opflclear af,sf,of
4889 */
4890FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4891{
4892 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4894 if (IEM_IS_MODRM_REG_MODE(bRm))
4895 {
4896 /*
4897 * Register, register.
4898 */
4899 IEM_MC_BEGIN(4, 1, 0, 0);
4900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4901 IEM_MC_LOCAL(uint32_t, fEFlags);
4902 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4903 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4904 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4905 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4906 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4907 IEM_MC_PREPARE_SSE_USAGE();
4908 IEM_MC_FETCH_EFLAGS(fEFlags);
4909 IEM_MC_REF_MXCSR(pfMxcsr);
4910 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4911 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4912 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4913 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4914 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4915 } IEM_MC_ELSE() {
4916 IEM_MC_COMMIT_EFLAGS(fEFlags);
4917 } IEM_MC_ENDIF();
4918
4919 IEM_MC_ADVANCE_RIP_AND_FINISH();
4920 IEM_MC_END();
4921 }
4922 else
4923 {
4924 /*
4925 * Register, memory.
4926 */
4927 IEM_MC_BEGIN(4, 3, 0, 0);
4928 IEM_MC_LOCAL(uint32_t, fEFlags);
4929 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4930 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4931 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4932 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4933 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4935
4936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4938 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4939 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4940
4941 IEM_MC_PREPARE_SSE_USAGE();
4942 IEM_MC_FETCH_EFLAGS(fEFlags);
4943 IEM_MC_REF_MXCSR(pfMxcsr);
4944 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4945 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4946 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4947 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4948 } IEM_MC_ELSE() {
4949 IEM_MC_COMMIT_EFLAGS(fEFlags);
4950 } IEM_MC_ENDIF();
4951
4952 IEM_MC_ADVANCE_RIP_AND_FINISH();
4953 IEM_MC_END();
4954 }
4955}
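
/*
 * The ucomiss worker folds the compare result into ZF/PF/CF and clears
 * OF/AF/SF, matching the @opflmodify/@opflclear notes above. As a sketch
 * (the real logic lives in iemAImpl_ucomiss_u128):
 *
 *      fEFlags &= ~(X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF | X86_EFL_OF | X86_EFL_AF | X86_EFL_SF);
 *      if (fUnordered)              fEFlags |= X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;
 *      else if (r32Src1 <  r32Src2) fEFlags |= X86_EFL_CF;
 *      else if (r32Src1 == r32Src2) fEFlags |= X86_EFL_ZF;
 *      // 'greater than' leaves ZF, PF and CF all clear.
 */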
4956
4957
4958/**
4959 * @opcode 0x2e
4960 * @oppfx 0x66
4961 * @opflmodify cf,pf,af,zf,sf,of
4962 * @opflclear af,sf,of
4963 */
4964FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4965{
4966 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4968 if (IEM_IS_MODRM_REG_MODE(bRm))
4969 {
4970 /*
4971 * Register, register.
4972 */
4973 IEM_MC_BEGIN(4, 1, 0, 0);
4974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4975 IEM_MC_LOCAL(uint32_t, fEFlags);
4976 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4977 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4978 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4979 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4980 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4981 IEM_MC_PREPARE_SSE_USAGE();
4982 IEM_MC_FETCH_EFLAGS(fEFlags);
4983 IEM_MC_REF_MXCSR(pfMxcsr);
4984 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4985 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4986 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4987 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4988 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4989 } IEM_MC_ELSE() {
4990 IEM_MC_COMMIT_EFLAGS(fEFlags);
4991 } IEM_MC_ENDIF();
4992
4993 IEM_MC_ADVANCE_RIP_AND_FINISH();
4994 IEM_MC_END();
4995 }
4996 else
4997 {
4998 /*
4999 * Register, memory.
5000 */
5001 IEM_MC_BEGIN(4, 3, 0, 0);
5002 IEM_MC_LOCAL(uint32_t, fEFlags);
5003 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5004 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5005 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5006 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5007 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5008 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5009
5010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5012 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5013 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5014
5015 IEM_MC_PREPARE_SSE_USAGE();
5016 IEM_MC_FETCH_EFLAGS(fEFlags);
5017 IEM_MC_REF_MXCSR(pfMxcsr);
5018 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5019 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5020 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5021 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5022 } IEM_MC_ELSE() {
5023 IEM_MC_COMMIT_EFLAGS(fEFlags);
5024 } IEM_MC_ENDIF();
5025
5026 IEM_MC_ADVANCE_RIP_AND_FINISH();
5027 IEM_MC_END();
5028 }
5029}
5030
5031
5032/* Opcode 0xf3 0x0f 0x2e - invalid */
5033/* Opcode 0xf2 0x0f 0x2e - invalid */
5034
5035
5036/**
5037 * @opcode 0x2f
5038 * @oppfx none
5039 * @opflmodify cf,pf,af,zf,sf,of
5040 * @opflclear af,sf,of
5041 */
5042FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
5043{
5044 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5045 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5046 if (IEM_IS_MODRM_REG_MODE(bRm))
5047 {
5048 /*
5049 * Register, register.
5050 */
5051 IEM_MC_BEGIN(4, 1, 0, 0);
5052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5053 IEM_MC_LOCAL(uint32_t, fEFlags);
5054 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5055 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5056 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5057 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5058 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5059 IEM_MC_PREPARE_SSE_USAGE();
5060 IEM_MC_FETCH_EFLAGS(fEFlags);
5061 IEM_MC_REF_MXCSR(pfMxcsr);
5062 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5063 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5064 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5065 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5066 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5067 } IEM_MC_ELSE() {
5068 IEM_MC_COMMIT_EFLAGS(fEFlags);
5069 } IEM_MC_ENDIF();
5070
5071 IEM_MC_ADVANCE_RIP_AND_FINISH();
5072 IEM_MC_END();
5073 }
5074 else
5075 {
5076 /*
5077 * Register, memory.
5078 */
5079 IEM_MC_BEGIN(4, 3, 0, 0);
5080 IEM_MC_LOCAL(uint32_t, fEFlags);
5081 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5082 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5083 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5084 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5085 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5087
5088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5090 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5091 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5092
5093 IEM_MC_PREPARE_SSE_USAGE();
5094 IEM_MC_FETCH_EFLAGS(fEFlags);
5095 IEM_MC_REF_MXCSR(pfMxcsr);
5096 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5097 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5098 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5099 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5100 } IEM_MC_ELSE() {
5101 IEM_MC_COMMIT_EFLAGS(fEFlags);
5102 } IEM_MC_ENDIF();
5103
5104 IEM_MC_ADVANCE_RIP_AND_FINISH();
5105 IEM_MC_END();
5106 }
5107}
5108
5109
5110/**
5111 * @opcode 0x2f
5112 * @oppfx 0x66
5113 * @opflmodify cf,pf,af,zf,sf,of
5114 * @opflclear af,sf,of
5115 */
5116FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5117{
5118 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5120 if (IEM_IS_MODRM_REG_MODE(bRm))
5121 {
5122 /*
5123 * Register, register.
5124 */
5125 IEM_MC_BEGIN(4, 1, 0, 0);
5126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5127 IEM_MC_LOCAL(uint32_t, fEFlags);
5128 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5129 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5130 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5131 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5132 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5133 IEM_MC_PREPARE_SSE_USAGE();
5134 IEM_MC_FETCH_EFLAGS(fEFlags);
5135 IEM_MC_REF_MXCSR(pfMxcsr);
5136 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5137 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5138 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5139 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5140 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5141 } IEM_MC_ELSE() {
5142 IEM_MC_COMMIT_EFLAGS(fEFlags);
5143 } IEM_MC_ENDIF();
5144
5145 IEM_MC_ADVANCE_RIP_AND_FINISH();
5146 IEM_MC_END();
5147 }
5148 else
5149 {
5150 /*
5151 * Register, memory.
5152 */
5153 IEM_MC_BEGIN(4, 3, 0, 0);
5154 IEM_MC_LOCAL(uint32_t, fEFlags);
5155 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5156 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5157 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5158 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5159 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5161
5162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5164 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5165 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5166
5167 IEM_MC_PREPARE_SSE_USAGE();
5168 IEM_MC_FETCH_EFLAGS(fEFlags);
5169 IEM_MC_REF_MXCSR(pfMxcsr);
5170 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5171 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5172 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5173 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5174 } IEM_MC_ELSE() {
5175 IEM_MC_COMMIT_EFLAGS(fEFlags);
5176 } IEM_MC_ENDIF();
5177
5178 IEM_MC_ADVANCE_RIP_AND_FINISH();
5179 IEM_MC_END();
5180 }
5181}
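
/*
 * A minimal standalone sketch (hypothetical helper, not an IEM interface) of
 * the EFLAGS mapping COMISS/COMISD produce, ignoring the MXCSR updates and
 * SIMD exceptions the real iemAImpl_comis*_u128 workers deal with:
 *     unordered (NaN operand) -> ZF=PF=CF=1
 *     src1 < src2             -> CF=1
 *     src1 == src2            -> ZF=1
 *     src1 > src2             -> ZF=PF=CF=0
 * AF, SF and OF always end up clear, matching the @opflclear note above.
 */
static uint32_t sketchComisEFlags(uint32_t fEFlags, double rdSrc1, double rdSrc2)
{
    fEFlags &= ~(uint32_t)(X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_OF);
    if (rdSrc1 != rdSrc1 || rdSrc2 != rdSrc2)   /* a NaN compares unequal to itself */
        fEFlags |= X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;
    else if (rdSrc1 < rdSrc2)
        fEFlags |= X86_EFL_CF;
    else if (rdSrc1 == rdSrc2)
        fEFlags |= X86_EFL_ZF;
    return fEFlags;
}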
5182
5183
5184/* Opcode 0xf3 0x0f 0x2f - invalid */
5185/* Opcode 0xf2 0x0f 0x2f - invalid */
5186
5187/** Opcode 0x0f 0x30. */
5188FNIEMOP_DEF(iemOp_wrmsr)
5189{
5190 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5192 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
5193}
5194
5195
5196/** Opcode 0x0f 0x31. */
5197FNIEMOP_DEF(iemOp_rdtsc)
5198{
5199 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5201 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
5202 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
5203 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
5204 iemCImpl_rdtsc);
5205}
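
/*
 * For reference, a hypothetical sketch (not IEM code) of how RDTSC splits its
 * 64-bit counter across the two registers listed as clobbered above: the low
 * dword goes to EAX and the high dword to EDX, with the upper halves of
 * RAX/RDX zeroed in 64-bit mode.
 */
static void sketchRdtscSplit(uint64_t uTsc, uint64_t *puRax, uint64_t *puRdx)
{
    *puRax = (uint32_t)uTsc;          /* low dword, zero-extended into RAX */
    *puRdx = (uint32_t)(uTsc >> 32);  /* high dword, zero-extended into RDX */
}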
5206
5207
5208/** Opcode 0x0f 0x32. */
5209FNIEMOP_DEF(iemOp_rdmsr)
5210{
5211 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5213 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
5214 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
5215 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
5216 iemCImpl_rdmsr);
5217}
5218
5219
5220/** Opcode 0x0f 0x33. */
5221FNIEMOP_DEF(iemOp_rdpmc)
5222{
5223 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5225 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
5226 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
5227 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
5228 iemCImpl_rdpmc);
5229}
5230
5231
5232/** Opcode 0x0f 0x34. */
5233FNIEMOP_DEF(iemOp_sysenter)
5234{
5235 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5237 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
5238 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
5239 iemCImpl_sysenter);
5240}
5241
5242/** Opcode 0x0f 0x35. */
5243FNIEMOP_DEF(iemOp_sysexit)
5244{
5245 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5247 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
5248 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
5249 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5250}
5251
5252/** Opcode 0x0f 0x37. */
5253FNIEMOP_STUB(iemOp_getsec);
5254
5255
5256/** Opcode 0x0f 0x38. */
5257FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5258{
5259#ifdef IEM_WITH_THREE_0F_38
5260 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5261 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5262#else
5263 IEMOP_BITCH_ABOUT_STUB();
5264 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5265#endif
5266}
5267
5268
5269/** Opcode 0x0f 0x3a. */
5270FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5271{
5272#ifdef IEM_WITH_THREE_0F_3A
5273 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5274 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5275#else
5276 IEMOP_BITCH_ABOUT_STUB();
5277 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5278#endif
5279}
5280
5281
5282/**
5283 * Implements a conditional move.
5284 *
5285 * Wish there was an obvious way to do this where we could share and reduce
5286 * code bloat.
5287 *
5288 * @param a_Cnd The conditional "microcode" operation.
5289 */
5290#define CMOV_X(a_Cnd) \
5291 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5292 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5293 { \
5294 switch (pVCpu->iem.s.enmEffOpSize) \
5295 { \
5296 case IEMMODE_16BIT: \
5297 IEM_MC_BEGIN(0, 1, 0, 0); \
5298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5299 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5300 a_Cnd { \
5301 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5302 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5303 } IEM_MC_ENDIF(); \
5304 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5305 IEM_MC_END(); \
5306 break; \
5307 \
5308 case IEMMODE_32BIT: \
5309 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0); \
5310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5311 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5312 a_Cnd { \
5313 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5314 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5315 } IEM_MC_ELSE() { \
5316 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5317 } IEM_MC_ENDIF(); \
5318 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5319 IEM_MC_END(); \
5320 break; \
5321 \
5322 case IEMMODE_64BIT: \
5323 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0); \
5324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5325 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5326 a_Cnd { \
5327 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5328 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5329 } IEM_MC_ENDIF(); \
5330 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5331 IEM_MC_END(); \
5332 break; \
5333 \
5334 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5335 } \
5336 } \
5337 else \
5338 { \
5339 switch (pVCpu->iem.s.enmEffOpSize) \
5340 { \
5341 case IEMMODE_16BIT: \
5342 IEM_MC_BEGIN(0, 2, 0, 0); \
5343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5344 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5347 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5348 a_Cnd { \
5349 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5350 } IEM_MC_ENDIF(); \
5351 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5352 IEM_MC_END(); \
5353 break; \
5354 \
5355 case IEMMODE_32BIT: \
5356 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0); \
5357 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5358 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5361 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5362 a_Cnd { \
5363 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5364 } IEM_MC_ELSE() { \
5365 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5366 } IEM_MC_ENDIF(); \
5367 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5368 IEM_MC_END(); \
5369 break; \
5370 \
5371 case IEMMODE_64BIT: \
5372 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0); \
5373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5374 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5377 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5378 a_Cnd { \
5379 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5380 } IEM_MC_ENDIF(); \
5381 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5382 IEM_MC_END(); \
5383 break; \
5384 \
5385 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5386 } \
5387 } do {} while (0)
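
/*
 * A hypothetical plain C sketch (not IEM code) of what one CMOV_X expansion
 * computes, using the cmovle predicate (ZF=1 or SF!=OF) as the example.  It
 * also shows the 32-bit quirk the IEMMODE_32BIT cases above encode: in
 * 64-bit mode a 32-bit CMOV zero-extends the destination even when the
 * condition is false, hence the IEM_MC_ELSE() branch clearing the high half.
 */
static uint64_t sketchCmovLeU32(uint64_t uDstOld, uint32_t u32Src, uint32_t fEFlags)
{
    bool const fTaken = (fEFlags & X86_EFL_ZF)
                     || !!(fEFlags & X86_EFL_SF) != !!(fEFlags & X86_EFL_OF);
    return fTaken ? u32Src : (uint32_t)uDstOld; /* high 32 bits cleared either way */
}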
5388
5389
5390
5391/**
5392 * @opcode 0x40
5393 * @opfltest of
5394 */
5395FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5396{
5397 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5398 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5399}
5400
5401
5402/**
5403 * @opcode 0x41
5404 * @opfltest of
5405 */
5406FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5407{
5408 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5409 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5410}
5411
5412
5413/**
5414 * @opcode 0x42
5415 * @opfltest cf
5416 */
5417FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5418{
5419 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5420 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5421}
5422
5423
5424/**
5425 * @opcode 0x43
5426 * @opfltest cf
5427 */
5428FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5429{
5430 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5431 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5432}
5433
5434
5435/**
5436 * @opcode 0x44
5437 * @opfltest zf
5438 */
5439FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5440{
5441 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5442 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5443}
5444
5445
5446/**
5447 * @opcode 0x45
5448 * @opfltest zf
5449 */
5450FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5451{
5452 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5453 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5454}
5455
5456
5457/**
5458 * @opcode 0x46
5459 * @opfltest cf,zf
5460 */
5461FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5462{
5463 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5464 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5465}
5466
5467
5468/**
5469 * @opcode 0x47
5470 * @opfltest cf,zf
5471 */
5472FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5473{
5474 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5475 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5476}
5477
5478
5479/**
5480 * @opcode 0x48
5481 * @opfltest sf
5482 */
5483FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5484{
5485 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5486 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5487}
5488
5489
5490/**
5491 * @opcode 0x49
5492 * @opfltest sf
5493 */
5494FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5495{
5496 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5497 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5498}
5499
5500
5501/**
5502 * @opcode 0x4a
5503 * @opfltest pf
5504 */
5505FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5506{
5507 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5508 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5509}
5510
5511
5512/**
5513 * @opcode 0x4b
5514 * @opfltest pf
5515 */
5516FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5517{
5518 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5519 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5520}
5521
5522
5523/**
5524 * @opcode 0x4c
5525 * @opfltest sf,of
5526 */
5527FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5528{
5529 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5530 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5531}
5532
5533
5534/**
5535 * @opcode 0x4d
5536 * @opfltest sf,of
5537 */
5538FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5539{
5540 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5541 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5542}
5543
5544
5545/**
5546 * @opcode 0x4e
5547 * @opfltest zf,sf,of
5548 */
5549FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5550{
5551 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5552 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5553}
5554
5555
5556/**
5557 * @opcode 0x4f
5558 * @opfltest zf,sf,of
5559 */
5560FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5561{
5562 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5563 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5564}
5565
5566#undef CMOV_X
5567
5568/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5569FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5570{
5571 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5572 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5573 if (IEM_IS_MODRM_REG_MODE(bRm))
5574 {
5575 /*
5576 * Register, register.
5577 */
5578 IEM_MC_BEGIN(2, 1, 0, 0);
5579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5580 IEM_MC_LOCAL(uint8_t, u8Dst);
5581 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5582 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5583 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5584 IEM_MC_PREPARE_SSE_USAGE();
5585 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5586 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5587 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5588 IEM_MC_ADVANCE_RIP_AND_FINISH();
5589 IEM_MC_END();
5590 }
5591 /* No memory operand. */
5592 else
5593 IEMOP_RAISE_INVALID_OPCODE_RET();
5594}
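
/*
 * A hypothetical sketch (not IEM code) of the bit gathering the
 * iemAImpl_movmskps_u128 worker performs: the sign bit of each of the four
 * packed singles lands in bits 0..3 of the destination, and the
 * IEM_MC_STORE_GREG_U32 above zero-extends the rest of the GPR.
 */
static uint32_t sketchMovMskPs(PCRTUINT128U puSrc)
{
    uint32_t fMask = 0;
    for (unsigned i = 0; i < 4; i++)
        fMask |= (puSrc->au32[i] >> 31) << i; /* sign bit of lane i -> bit i */
    return fMask;
}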
5595
5596
5597/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5598FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5599{
5600 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5601 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5602 if (IEM_IS_MODRM_REG_MODE(bRm))
5603 {
5604 /*
5605 * Register, register.
5606 */
5607 IEM_MC_BEGIN(2, 1, 0, 0);
5608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5609 IEM_MC_LOCAL(uint8_t, u8Dst);
5610 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5611 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5612 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5613 IEM_MC_PREPARE_SSE_USAGE();
5614 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5615 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5616 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5617 IEM_MC_ADVANCE_RIP_AND_FINISH();
5618 IEM_MC_END();
5619 }
5620 /* No memory operand. */
5621 else
5622 IEMOP_RAISE_INVALID_OPCODE_RET();
5624}
5625
5626
5627/* Opcode 0xf3 0x0f 0x50 - invalid */
5628/* Opcode 0xf2 0x0f 0x50 - invalid */
5629
5630
5631/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5632FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5633{
5634 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5635 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5636}
5637
5638
5639/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5640FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5641{
5642 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5643 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5644}
5645
5646
5647/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5648FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5649{
5650 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5651 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5652}
5653
5654
5655/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5656FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5657{
5658 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5659 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5660}
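
/*
 * The *_FullR32_To_Full / *_FullR64_To_Full workers used by the scalar forms
 * above only replace the low lane of the destination.  A hypothetical sketch
 * (not IEM code) of that merge, using the raw au32 view of X86XMMREG:
 */
static void sketchScalarSsMerge(PX86XMMREG puDst, uint32_t u32ResultBits)
{
    puDst->au32[0] = u32ResultBits; /* low lane takes the scalar result */
    /* au32[1], au32[2] and au32[3] deliberately keep their old contents. */
}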
5661
5662
5663/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5664FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5665{
5666 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5667 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5668}
5669
5670
5671/* Opcode 0x66 0x0f 0x52 - invalid */
5672
5673
5674/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5675FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5676{
5677 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5678 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5679}
5680
5681
5682/* Opcode 0xf2 0x0f 0x52 - invalid */
5683
5684
5685/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5686FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
5687{
5688 IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5689 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
5690}
5691
5692
5693/* Opcode 0x66 0x0f 0x53 - invalid */
5694
5695
5696/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5697FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
5698{
5699 IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5700 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
5701}
5702
5703
5704/* Opcode 0xf2 0x0f 0x53 - invalid */
5705
5706
5707/** Opcode 0x0f 0x54 - andps Vps, Wps */
5708FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5709{
5710 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5711 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5712}
5713
5714
5715/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5716FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5717{
5718 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5719 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5720}
5721
5722
5723/* Opcode 0xf3 0x0f 0x54 - invalid */
5724/* Opcode 0xf2 0x0f 0x54 - invalid */
5725
5726
5727/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5728FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5729{
5730 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5731 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5732}
5733
5734
5735/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5736FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5737{
5738 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5739 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5740}
5741
5742
5743/* Opcode 0xf3 0x0f 0x55 - invalid */
5744/* Opcode 0xf2 0x0f 0x55 - invalid */
5745
5746
5747/** Opcode 0x0f 0x56 - orps Vps, Wps */
5748FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5749{
5750 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5751 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5752}
5753
5754
5755/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5756FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5757{
5758 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5759 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5760}
5761
5762
5763/* Opcode 0xf3 0x0f 0x56 - invalid */
5764/* Opcode 0xf2 0x0f 0x56 - invalid */
5765
5766
5767/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5768FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5769{
5770 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5771 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5772}
5773
5774
5775/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5776FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5777{
5778 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5779 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5780}
5781
5782
5783/* Opcode 0xf3 0x0f 0x57 - invalid */
5784/* Opcode 0xf2 0x0f 0x57 - invalid */
5785
5786/** Opcode 0x0f 0x58 - addps Vps, Wps */
5787FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5788{
5789 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5790 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5791}
5792
5793
5794/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5795FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5796{
5797 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5798 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5799}
5800
5801
5802/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5803FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5804{
5805 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5806 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5807}
5808
5809
5810/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5811FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5812{
5813 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5814 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5815}
5816
5817
5818/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5819FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5820{
5821 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5822 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5823}
5824
5825
5826/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5827FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5828{
5829 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5830 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5831}
5832
5833
5834/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5835FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5836{
5837 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5838 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5839}
5840
5841
5842/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5843FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5844{
5845 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5846 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5847}
5848
5849
5850/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5851FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5852{
5853 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5854 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5855}
5856
5857
5858/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5859FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5860{
5861 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5862 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5863}
5864
5865
5866/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5867FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5868{
5869 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5870 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5871}
5872
5873
5874/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5875FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5876{
5877 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5878 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5879}
5880
5881
5882/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5883FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5884{
5885 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5886 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5887}
5888
5889
5890/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5891FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5892{
5893 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5894 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5895}
5896
5897
5898/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5899FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5900{
5901 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5902 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5903}
5904
5905
5906/* Opcode 0xf2 0x0f 0x5b - invalid */
5907
5908
5909/** Opcode 0x0f 0x5c - subps Vps, Wps */
5910FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5911{
5912 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5913 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5914}
5915
5916
5917/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5918FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5919{
5920 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5921 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5922}
5923
5924
5925/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5926FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5927{
5928 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5929 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5930}
5931
5932
5933/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5934FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5935{
5936 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5937 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5938}
5939
5940
5941/** Opcode 0x0f 0x5d - minps Vps, Wps */
5942FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5943{
5944 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5945 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5946}
5947
5948
5949/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5950FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5951{
5952 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5953 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5954}
5955
5956
5957/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5958FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5959{
5960 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5961 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5962}
5963
5964
5965/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5966FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5967{
5968 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5969 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5970}
5971
5972
5973/** Opcode 0x0f 0x5e - divps Vps, Wps */
5974FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5975{
5976 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5977 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5978}
5979
5980
5981/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5982FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5983{
5984 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5985 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5986}
5987
5988
5989/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5990FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5991{
5992 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5993 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5994}
5995
5996
5997/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5998FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5999{
6000 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
6001 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
6002}
6003
6004
6005/** Opcode 0x0f 0x5f - maxps Vps, Wps */
6006FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
6007{
6008 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
6009 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
6010}
6011
6012
6013/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
6014FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
6015{
6016 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
6017 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
6018}
6019
6020
6021/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
6022FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
6023{
6024 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
6025 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
6026}
6027
6028
6029/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
6030FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
6031{
6032 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
6033 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
6034}
6035
6036
6037/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
6038FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
6039{
6040 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6041 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
6042}
6043
6044
6045/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
6046FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
6047{
6048 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6049 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
6050}
6051
6052
6053/* Opcode 0xf3 0x0f 0x60 - invalid */
6054
6055
6056/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
6057FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
6058{
6059 /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
6060 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6061 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
6062}
6063
6064
6065/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
6066FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
6067{
6068 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6069 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
6070}
6071
6072
6073/* Opcode 0xf3 0x0f 0x61 - invalid */
6074
6075
6076/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
6077FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
6078{
6079 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6080 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
6081}
6082
6083
6084/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
6085FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
6086{
6087 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6088 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
6089}
6090
6091
6092/* Opcode 0xf3 0x0f 0x62 - invalid */
6093
6094
6095
6096/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
6097FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
6098{
6099 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6100 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
6101}
6102
6103
6104/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
6105FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
6106{
6107 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6108 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
6109}
6110
6111
6112/* Opcode 0xf3 0x0f 0x63 - invalid */
6113
6114
6115/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
6116FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
6117{
6118 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6119 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
6120}
6121
6122
6123/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
6124FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
6125{
6126 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6127 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
6128}
6129
6130
6131/* Opcode 0xf3 0x0f 0x64 - invalid */
6132
6133
6134/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6135FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6136{
6137 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6138 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6139}
6140
6141
6142/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6143FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6144{
6145 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6146 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6147}
6148
6149
6150/* Opcode 0xf3 0x0f 0x65 - invalid */
6151
6152
6153/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6154FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6155{
6156 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6157 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6158}
6159
6160
6161/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6162FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6163{
6164 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6165 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6166}
6167
6168
6169/* Opcode 0xf3 0x0f 0x66 - invalid */
6170
6171
6172/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6173FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6174{
6175 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6176 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6177}
6178
6179
6180/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6181FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6182{
6183 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6184 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6185}
6186
6187
6188/* Opcode 0xf3 0x0f 0x67 - invalid */
6189
6190
6191/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6192 * @note Intel and AMD both use Qd for the second parameter; however, they
6193 * both list it as an mmX/mem64 operand and Intel describes it as being
6194 * loaded as a qword, so it should be Qq, shouldn't it? */
6195FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6196{
6197 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6198 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6199}
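
/*
 * A hypothetical sketch (not IEM code) of the 64-bit high-half interleave the
 * iemAImpl_punpckhbw_u64 worker performs.  Only bytes 4..7 of each operand
 * contribute to the result, even though the memory operand is listed/loaded
 * as a qword (the Qd vs. Qq debate in the note above).
 */
static uint64_t sketchPunpckhbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uResult |= ((uDst >> (32 + i * 8)) & 0xff) << (i * 16);     /* even result bytes from dst */
        uResult |= ((uSrc >> (32 + i * 8)) & 0xff) << (i * 16 + 8); /* odd result bytes from src */
    }
    return uResult;
}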
6200
6201
6202/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6203FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6204{
6205 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6206 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6207}
6208
6209
6210/* Opcode 0xf3 0x0f 0x68 - invalid */
6211
6212
6213/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6214 * @note Intel and AMD both use Qd for the second parameter; however, they
6215 * both list it as an mmX/mem64 operand and Intel describes it as being
6216 * loaded as a qword, so it should be Qq, shouldn't it? */
6217FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6218{
6219 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6220 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6221}
6222
6223
6224/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6225FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6226{
6227 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6228 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6230}
6231
6232
6233/* Opcode 0xf3 0x0f 0x69 - invalid */
6234
6235
6236/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6237 * @note Intel and AMD both use Qd for the second parameter; however, they
6238 * both list it as an mmX/mem64 operand and Intel describes it as being
6239 * loaded as a qword, so it should be Qq, shouldn't it? */
6240FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6241{
6242 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6243 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6244}
6245
6246
6247/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6248FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6249{
6250 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6251 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6252}
6253
6254
6255/* Opcode 0xf3 0x0f 0x6a - invalid */
6256
6257
6258/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6259FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6260{
6261 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6262 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6263}
6264
6265
6266/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6267FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6268{
6269 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6270 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6271}
6272
6273
6274/* Opcode 0xf3 0x0f 0x6b - invalid */
6275
6276
6277/* Opcode 0x0f 0x6c - invalid */
6278
6279
6280/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6281FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6282{
6283 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6284 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6285}
6286
6287
6288/* Opcode 0xf3 0x0f 0x6c - invalid */
6289/* Opcode 0xf2 0x0f 0x6c - invalid */
6290
6291
6292/* Opcode 0x0f 0x6d - invalid */
6293
6294
6295/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6296FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6297{
6298 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6299 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6300}
6301
6302
6303/* Opcode 0xf3 0x0f 0x6d - invalid */
6304
6305
6306FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6307{
6308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6309 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6310 {
6311 /**
6312 * @opcode 0x6e
6313 * @opcodesub rex.w=1
6314 * @oppfx none
6315 * @opcpuid mmx
6316 * @opgroup og_mmx_datamove
6317 * @opxcpttype 5
6318 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6319 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6320 */
6321 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6322 if (IEM_IS_MODRM_REG_MODE(bRm))
6323 {
6324 /* MMX, greg64 */
6325 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6327 IEM_MC_LOCAL(uint64_t, u64Tmp);
6328
6329 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6330 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6331 IEM_MC_FPU_TO_MMX_MODE();
6332
6333 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6334 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6335
6336 IEM_MC_ADVANCE_RIP_AND_FINISH();
6337 IEM_MC_END();
6338 }
6339 else
6340 {
6341 /* MMX, [mem64] */
6342 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6344 IEM_MC_LOCAL(uint64_t, u64Tmp);
6345
6346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6348 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6349 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6350
6351 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6352 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6353 IEM_MC_FPU_TO_MMX_MODE();
6354
6355 IEM_MC_ADVANCE_RIP_AND_FINISH();
6356 IEM_MC_END();
6357 }
6358 }
6359 else
6360 {
6361 /**
6362 * @opdone
6363 * @opcode 0x6e
6364 * @opcodesub rex.w=0
6365 * @oppfx none
6366 * @opcpuid mmx
6367 * @opgroup og_mmx_datamove
6368 * @opxcpttype 5
6369 * @opfunction iemOp_movd_q_Pd_Ey
6370 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6371 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6372 */
6373 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6374 if (IEM_IS_MODRM_REG_MODE(bRm))
6375 {
6376 /* MMX, greg32 */
6377 IEM_MC_BEGIN(0, 1, 0, 0);
6378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6379 IEM_MC_LOCAL(uint32_t, u32Tmp);
6380
6381 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6382 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6383 IEM_MC_FPU_TO_MMX_MODE();
6384
6385 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6386 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6387
6388 IEM_MC_ADVANCE_RIP_AND_FINISH();
6389 IEM_MC_END();
6390 }
6391 else
6392 {
6393 /* MMX, [mem32] */
6394 IEM_MC_BEGIN(0, 2, 0, 0);
6395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6396 IEM_MC_LOCAL(uint32_t, u32Tmp);
6397
6398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6400 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6401 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6402
6403 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6404 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6405 IEM_MC_FPU_TO_MMX_MODE();
6406
6407 IEM_MC_ADVANCE_RIP_AND_FINISH();
6408 IEM_MC_END();
6409 }
6410 }
6411}
6412
6413FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6414{
6415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6416 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6417 {
6418 /**
6419 * @opcode 0x6e
6420 * @opcodesub rex.w=1
6421 * @oppfx 0x66
6422 * @opcpuid sse2
6423 * @opgroup og_sse2_simdint_datamove
6424 * @opxcpttype 5
6425 * @optest 64-bit / op1=1 op2=2 -> op1=2
6426 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6427 */
6428 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6429 if (IEM_IS_MODRM_REG_MODE(bRm))
6430 {
6431 /* XMM, greg64 */
6432 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6434 IEM_MC_LOCAL(uint64_t, u64Tmp);
6435
6436 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6437 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6438
6439 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6440 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6441
6442 IEM_MC_ADVANCE_RIP_AND_FINISH();
6443 IEM_MC_END();
6444 }
6445 else
6446 {
6447 /* XMM, [mem64] */
6448 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6450 IEM_MC_LOCAL(uint64_t, u64Tmp);
6451
6452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6454 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6455 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6456
6457 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6458 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6459
6460 IEM_MC_ADVANCE_RIP_AND_FINISH();
6461 IEM_MC_END();
6462 }
6463 }
6464 else
6465 {
6466 /**
6467 * @opdone
6468 * @opcode 0x6e
6469 * @opcodesub rex.w=0
6470 * @oppfx 0x66
6471 * @opcpuid sse2
6472 * @opgroup og_sse2_simdint_datamove
6473 * @opxcpttype 5
6474 * @opfunction iemOp_movd_q_Vy_Ey
6475 * @optest op1=1 op2=2 -> op1=2
6476 * @optest op1=0 op2=-42 -> op1=-42
6477 */
6478 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6479 if (IEM_IS_MODRM_REG_MODE(bRm))
6480 {
6481 /* XMM, greg32 */
6482 IEM_MC_BEGIN(0, 1, 0, 0);
6483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6484 IEM_MC_LOCAL(uint32_t, u32Tmp);
6485
6486 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6487 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6488
6489 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6490 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6491
6492 IEM_MC_ADVANCE_RIP_AND_FINISH();
6493 IEM_MC_END();
6494 }
6495 else
6496 {
6497 /* XMM, [mem32] */
6498 IEM_MC_BEGIN(0, 2, 0, 0);
6499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6500 IEM_MC_LOCAL(uint32_t, u32Tmp);
6501
6502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6504 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6505 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6506
6507 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6508 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6509
6510 IEM_MC_ADVANCE_RIP_AND_FINISH();
6511 IEM_MC_END();
6512 }
6513 }
6514}
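
/*
 * A hypothetical sketch (not IEM code) of the zero extension performed by
 * IEM_MC_STORE_XREG_U32_ZX_U128 above: MOVD into an XMM register clears
 * everything above the 32-bit source, all the way through bit 127.
 */
static void sketchMovdZxU128(PRTUINT128U puDst, uint32_t u32Src)
{
    puDst->au32[0] = u32Src;
    puDst->au32[1] = 0; /* rest of the low qword */
    puDst->au64[1] = 0; /* whole high qword */
}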
6515
6516/* Opcode 0xf3 0x0f 0x6e - invalid */
6517
6518
6519/**
6520 * @opcode 0x6f
6521 * @oppfx none
6522 * @opcpuid mmx
6523 * @opgroup og_mmx_datamove
6524 * @opxcpttype 5
6525 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6526 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6527 */
6528FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6529{
6530 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
6531 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6532 if (IEM_IS_MODRM_REG_MODE(bRm))
6533 {
6534 /*
6535 * Register, register.
6536 */
6537 IEM_MC_BEGIN(0, 1, 0, 0);
6538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6539 IEM_MC_LOCAL(uint64_t, u64Tmp);
6540
6541 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6542 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6543 IEM_MC_FPU_TO_MMX_MODE();
6544
6545 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6546 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6547
6548 IEM_MC_ADVANCE_RIP_AND_FINISH();
6549 IEM_MC_END();
6550 }
6551 else
6552 {
6553 /*
6554 * Register, memory.
6555 */
6556 IEM_MC_BEGIN(0, 2, 0, 0);
6557 IEM_MC_LOCAL(uint64_t, u64Tmp);
6558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6559
6560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6562 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6563 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6564
6565 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6566 IEM_MC_FPU_TO_MMX_MODE();
6567
6568 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6569
6570 IEM_MC_ADVANCE_RIP_AND_FINISH();
6571 IEM_MC_END();
6572 }
6573}
6574
6575/**
6576 * @opcode 0x6f
6577 * @oppfx 0x66
6578 * @opcpuid sse2
6579 * @opgroup og_sse2_simdint_datamove
6580 * @opxcpttype 1
6581 * @optest op1=1 op2=2 -> op1=2
6582 * @optest op1=0 op2=-42 -> op1=-42
6583 */
6584FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6585{
6586 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6588 if (IEM_IS_MODRM_REG_MODE(bRm))
6589 {
6590 /*
6591 * Register, register.
6592 */
6593 IEM_MC_BEGIN(0, 0, 0, 0);
6594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6595
6596 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6597 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6598
6599 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6600 IEM_GET_MODRM_RM(pVCpu, bRm));
6601 IEM_MC_ADVANCE_RIP_AND_FINISH();
6602 IEM_MC_END();
6603 }
6604 else
6605 {
6606 /*
6607 * Register, memory.
6608 */
6609 IEM_MC_BEGIN(0, 2, 0, 0);
6610 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6612
6613 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6615 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6616 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6617
6618 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6619 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6620
6621 IEM_MC_ADVANCE_RIP_AND_FINISH();
6622 IEM_MC_END();
6623 }
6624}
6625
6626/**
6627 * @opcode 0x6f
6628 * @oppfx 0xf3
6629 * @opcpuid sse2
6630 * @opgroup og_sse2_simdint_datamove
6631 * @opxcpttype 4UA
6632 * @optest op1=1 op2=2 -> op1=2
6633 * @optest op1=0 op2=-42 -> op1=-42
6634 */
6635FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6636{
6637 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6639 if (IEM_IS_MODRM_REG_MODE(bRm))
6640 {
6641 /*
6642 * Register, register.
6643 */
6644 IEM_MC_BEGIN(0, 0, 0, 0);
6645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6646 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6647 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6648 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6649 IEM_GET_MODRM_RM(pVCpu, bRm));
6650 IEM_MC_ADVANCE_RIP_AND_FINISH();
6651 IEM_MC_END();
6652 }
6653 else
6654 {
6655 /*
6656 * Register, memory.
6657 */
6658 IEM_MC_BEGIN(0, 2, 0, 0);
6659 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6661
6662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6664 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6665 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6666 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6667 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6668
6669 IEM_MC_ADVANCE_RIP_AND_FINISH();
6670 IEM_MC_END();
6671 }
6672}
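
/*
 * The only semantic difference between the two fetches above is alignment:
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE (movdqa) faults on a misaligned address
 * while IEM_MC_FETCH_MEM_U128_NO_AC (movdqu) accepts anything.  A
 * hypothetical sketch (not IEM code) of that check:
 */
static bool sketchSseAlignmentOk(RTGCPTR GCPtrMem, bool fAlignedForm)
{
    return !fAlignedForm || (GCPtrMem & 15) == 0; /* movdqa needs 16-byte alignment */
}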
6673
6674
6675/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6676FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6677{
6678 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6680 if (IEM_IS_MODRM_REG_MODE(bRm))
6681 {
6682 /*
6683 * Register, register.
6684 */
6685 IEM_MC_BEGIN(3, 0, 0, 0);
6686 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6688 IEM_MC_ARG(uint64_t *, pDst, 0);
6689 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6690 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6691 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6692 IEM_MC_PREPARE_FPU_USAGE();
6693 IEM_MC_FPU_TO_MMX_MODE();
6694
6695 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6696 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6697 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6698 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6699
6700 IEM_MC_ADVANCE_RIP_AND_FINISH();
6701 IEM_MC_END();
6702 }
6703 else
6704 {
6705 /*
6706 * Register, memory.
6707 */
6708 IEM_MC_BEGIN(3, 2, 0, 0);
6709 IEM_MC_ARG(uint64_t *, pDst, 0);
6710 IEM_MC_LOCAL(uint64_t, uSrc);
6711 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6713
6714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6715 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6716 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6718 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6719 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6720
6721 IEM_MC_PREPARE_FPU_USAGE();
6722 IEM_MC_FPU_TO_MMX_MODE();
6723
6724 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6725 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6726 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6727
6728 IEM_MC_ADVANCE_RIP_AND_FINISH();
6729 IEM_MC_END();
6730 }
6731}
6732
6733
6734/**
6735 * Common worker for SSE2 instructions on the forms:
6736 * pshufd xmm1, xmm2/mem128, imm8
6737 * pshufhw xmm1, xmm2/mem128, imm8
6738 * pshuflw xmm1, xmm2/mem128, imm8
6739 *
6740 * Proper alignment of the 128-bit operand is enforced.
6741 * Exceptions type 4. SSE2 cpuid checks.
6742 */
6743FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6744{
6745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6746 if (IEM_IS_MODRM_REG_MODE(bRm))
6747 {
6748 /*
6749 * Register, register.
6750 */
6751 IEM_MC_BEGIN(3, 0, 0, 0);
6752 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6754 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6755 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6756 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6757 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6758 IEM_MC_PREPARE_SSE_USAGE();
6759 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6760 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6761 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6762 IEM_MC_ADVANCE_RIP_AND_FINISH();
6763 IEM_MC_END();
6764 }
6765 else
6766 {
6767 /*
6768 * Register, memory.
6769 */
6770 IEM_MC_BEGIN(3, 2, 0, 0);
6771 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6772 IEM_MC_LOCAL(RTUINT128U, uSrc);
6773 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6775
6776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6777 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6778 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6780 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6781
6782 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6783 IEM_MC_PREPARE_SSE_USAGE();
6784 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6785 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6786
6787 IEM_MC_ADVANCE_RIP_AND_FINISH();
6788 IEM_MC_END();
6789 }
6790}
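
/*
 * A hypothetical sketch (not IEM code) of the dword selection the pshufd
 * worker performs: each 2-bit field of the immediate picks the source lane
 * for one destination lane.  A temporary keeps dst==src aliasing safe.
 */
static void sketchPshufdU128(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
{
    RTUINT128U uTmp;
    for (unsigned i = 0; i < 4; i++)
        uTmp.au32[i] = puSrc->au32[(bImm >> (i * 2)) & 3];
    *puDst = uTmp;
}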
6791
6792
6793/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6794FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6795{
6796 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6797 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6798}
6799
6800
6801/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6802FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6803{
6804 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6805 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6806}
6807
6808
6809/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6810FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6811{
6812 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6813 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6814}
6815
6816
6817/**
6818 * Common worker for MMX instructions of the form:
6819 * psrlw mm, imm8
6820 * psraw mm, imm8
6821 * psllw mm, imm8
6822 * psrld mm, imm8
6823 * psrad mm, imm8
6824 * pslld mm, imm8
6825 * psrlq mm, imm8
6826 * psllq mm, imm8
6827 *
6828 */
6829FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6830{
6831 if (IEM_IS_MODRM_REG_MODE(bRm))
6832 {
6833 /*
6834 * Register, immediate.
6835 */
6836 IEM_MC_BEGIN(2, 0, 0, 0);
6837 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6839 IEM_MC_ARG(uint64_t *, pDst, 0);
6840 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6841 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6842 IEM_MC_PREPARE_FPU_USAGE();
6843 IEM_MC_FPU_TO_MMX_MODE();
6844
6845 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6846 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6847 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6848
6849 IEM_MC_ADVANCE_RIP_AND_FINISH();
6850 IEM_MC_END();
6851 }
6852 else
6853 {
6854 /*
6855 * Register, memory not supported.
6856 */
6857 /// @todo Caller already enforced register mode?!
6858 AssertFailedReturn(VINF_SUCCESS);
6859 }
6860}
6861
6862
6863/**
6864 * Common worker for SSE2 instructions of the form:
6865 * psrlw xmm, imm8
6866 * psraw xmm, imm8
6867 * psllw xmm, imm8
6868 * psrld xmm, imm8
6869 * psrad xmm, imm8
6870 * pslld xmm, imm8
6871 * psrlq xmm, imm8
6872 * psllq xmm, imm8
6873 *
6874 */
6875FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6876{
6877 if (IEM_IS_MODRM_REG_MODE(bRm))
6878 {
6879 /*
6880 * Register, immediate.
6881 */
6882 IEM_MC_BEGIN(2, 0, 0, 0);
6883 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6885 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6886 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6887 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6888 IEM_MC_PREPARE_SSE_USAGE();
6889 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6890 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6891 IEM_MC_ADVANCE_RIP_AND_FINISH();
6892 IEM_MC_END();
6893 }
6894 else
6895 {
6896 /*
6897 * Register, memory not supported.
6898 */
6899 /// @todo Caller already enforced register mode?!
6900 AssertFailedReturn(VINF_SUCCESS);
6901 }
6902}
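
/*
 * A hypothetical sketch (not IEM code) of one of the lane shifts these
 * workers implement, psrlw with an immediate: every 16-bit lane shifts
 * independently, and counts above 15 clear all lanes.  One wide shift plus a
 * mask that discards the bits which crossed a lane boundary does the job:
 */
static uint64_t sketchPsrlwImmU64(uint64_t uSrc, uint8_t bShift)
{
    if (bShift > 15)
        return 0;
    uint64_t const fLaneMask = (UINT64_C(0xffff) >> bShift) * UINT64_C(0x0001000100010001);
    return (uSrc >> bShift) & fLaneMask;
}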
6903
6904
6905/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6906FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6907{
6908// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6909 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6910}
6911
6912
6913/** Opcode 0x66 0x0f 0x71 11/2. */
6914FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6915{
6916// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6917 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6918}
6919
6920
6921/** Opcode 0x0f 0x71 11/4. */
6922FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6923{
6924// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6925 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6926}
6927
6928
6929/** Opcode 0x66 0x0f 0x71 11/4. */
6930FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6931{
6932// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6933 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6934}
6935
6936
6937/** Opcode 0x0f 0x71 11/6. */
6938FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6939{
6940// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6941 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6942}
6943
6944
6945/** Opcode 0x66 0x0f 0x71 11/6. */
6946FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6947{
6948// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6949 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6950}
6951
6952
6953/**
6954 * Group 12 jump table for register variant.
6955 */
6956IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6957{
6958 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6959 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6960 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6961 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6962 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6963 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6964 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6965 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6966};
6967AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6968
6969
6970/** Opcode 0x0f 0x71. */
6971FNIEMOP_DEF(iemOp_Grp12)
6972{
6973 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6974 if (IEM_IS_MODRM_REG_MODE(bRm))
6975 /* register, register */
6976 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6977 + pVCpu->iem.s.idxPrefix], bRm);
6978 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6979}
6980
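/*
 * A note on the table lookup above (illustrative, not part of the decoder):
 * each row of g_apfnGroup12RegReg holds four entries, one per SIMD prefix in
 * the order none, 0x66, 0xf3, 0xf2 (matching the columns in the table
 * comments), so the index is reg * 4 + idxPrefix.  E.g. for 66 0f 71 d1
 * (psrlw xmm1, imm8):
 *
 *      reg       = (0xd1 >> 3) & 7  = 2
 *      idxPrefix = 1                     (operand size prefix 0x66)
 *      index     = 2 * 4 + 1        = 9  -> iemOp_Grp12_psrlw_Ux_Ib
 *
 * The same scheme applies to the group 13 and 14 dispatchers below.
 */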
6981
6982/** Opcode 0x0f 0x72 11/2. */
6983FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6984{
6985// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6986 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6987}
6988
6989
6990/** Opcode 0x66 0x0f 0x72 11/2. */
6991FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6992{
6993// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6994 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6995}
6996
6997
6998/** Opcode 0x0f 0x72 11/4. */
6999FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
7000{
7001// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
7002 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
7003}
7004
7005
7006/** Opcode 0x66 0x0f 0x72 11/4. */
7007FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
7008{
7009// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
7010 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
7011}
7012
7013
7014/** Opcode 0x0f 0x72 11/6. */
7015FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
7016{
7017// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
7018 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
7019}
7020
7021/** Opcode 0x66 0x0f 0x72 11/6. */
7022FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
7023{
7024// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
7025 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
7026}
7027
7028
7029/**
7030 * Group 13 jump table for register variant.
7031 */
7032IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
7033{
7034 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7035 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7036 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7037 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7038 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7039 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7040 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7041 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
7042};
7043AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
7044
7045/** Opcode 0x0f 0x72. */
7046FNIEMOP_DEF(iemOp_Grp13)
7047{
7048 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7049 if (IEM_IS_MODRM_REG_MODE(bRm))
7050 /* register, register */
7051 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7052 + pVCpu->iem.s.idxPrefix], bRm);
7053 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7054}
7055
7056
7057/** Opcode 0x0f 0x73 11/2. */
7058FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
7059{
7060// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
7061 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
7062}
7063
7064
7065/** Opcode 0x66 0x0f 0x73 11/2. */
7066FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
7067{
7068// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
7069 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
7070}
7071
7072
7073/** Opcode 0x66 0x0f 0x73 11/3. */
7074FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
7075{
7076// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
7077 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
7078}
7079
7080
7081/** Opcode 0x0f 0x73 11/6. */
7082FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
7083{
7084// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
7085 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
7086}
7087
7088
7089/** Opcode 0x66 0x0f 0x73 11/6. */
7090FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
7091{
7092// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
7093 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
7094}
7095
7096
7097/** Opcode 0x66 0x0f 0x73 11/7. */
7098FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
7099{
7100// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
7101 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
7102}
7103
7104/**
7105 * Group 14 jump table for register variant.
7106 */
7107IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
7108{
7109 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7110 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7111 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7112 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7113 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7114 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7115 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7116 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7117};
7118AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
7119
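/*
 * Note that /3 (psrldq) and /7 (pslldq) exist only with the 0x66 prefix,
 * hence the lone Ux entries in those rows: the byte-wise shifts have no
 * MMX (Nq) forms.
 */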
7120
7121/** Opcode 0x0f 0x73. */
7122FNIEMOP_DEF(iemOp_Grp14)
7123{
7124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7125 if (IEM_IS_MODRM_REG_MODE(bRm))
7126 /* register, register */
7127 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7128 + pVCpu->iem.s.idxPrefix], bRm);
7129 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7130}
7131
7132
7133/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7134FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7135{
7136 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7137 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7138}
7139
7140
7141/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7142FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7143{
7144 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7145 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7146}
7147
7148
7149/* Opcode 0xf3 0x0f 0x74 - invalid */
7150/* Opcode 0xf2 0x0f 0x74 - invalid */
7151
7152
7153/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7154FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7155{
7156 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7157 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7158}
7159
7160
7161/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7162FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7163{
7164 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7165 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7166}
7167
7168
7169/* Opcode 0xf3 0x0f 0x75 - invalid */
7170/* Opcode 0xf2 0x0f 0x75 - invalid */
7171
7172
7173/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7174FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7175{
7176 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7177 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7178}
7179
7180
7181/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7182FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7183{
7184 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7185 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7186}
7187
7188
7189/* Opcode 0xf3 0x0f 0x76 - invalid */
7190/* Opcode 0xf2 0x0f 0x76 - invalid */
7191
7192
7193/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
7194FNIEMOP_DEF(iemOp_emms)
7195{
7196 IEMOP_MNEMONIC(emms, "emms");
7197 IEM_MC_BEGIN(0, 0, 0, 0);
7198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7199 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7200 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7201 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7202 IEM_MC_FPU_FROM_MMX_MODE();
7203 IEM_MC_ADVANCE_RIP_AND_FINISH();
7204 IEM_MC_END();
7205}
7206
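/*
 * For context: guest code executes EMMS after an MMX section so that
 * subsequent x87 code starts with an all-empty register stack; the
 * IEM_MC_FPU_FROM_MMX_MODE() above emulates that tag word reset.  With
 * intrinsics the guest-side pattern is roughly (illustrative only):
 *
 *      #include <mmintrin.h>
 *      __m64 a = _mm_set1_pi16(1);
 *      __m64 r = _mm_add_pi16(a, a);   // MMX work marks x87 regs in use
 *      _mm_empty();                    // EMMS: tag word back to all empty
 */
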
7207/* Opcode 0x66 0x0f 0x77 - invalid */
7208/* Opcode 0xf3 0x0f 0x77 - invalid */
7209/* Opcode 0xf2 0x0f 0x77 - invalid */
7210
7211/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
7212#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7213FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7214{
7215 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7216 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7217 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
7218 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7219
7220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7221 if (IEM_IS_MODRM_REG_MODE(bRm))
7222 {
7223 /*
7224 * Register, register.
7225 */
7226 if (enmEffOpSize == IEMMODE_64BIT)
7227 {
7228 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
7229 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7230 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7231 IEM_MC_ARG(uint64_t, u64Enc, 1);
7232 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7233 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7234 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7235 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
7236 iemCImpl_vmread_reg64, pu64Dst, u64Enc);
7237 IEM_MC_END();
7238 }
7239 else
7240 {
7241 IEM_MC_BEGIN(2, 0, 0, 0);
7242 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7243 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7244 IEM_MC_ARG(uint32_t, u32Enc, 1);
7245 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7246 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7247 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7248 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
7249 iemCImpl_vmread_reg32, pu64Dst, u32Enc);
7250 IEM_MC_END();
7251 }
7252 }
7253 else
7254 {
7255 /*
7256 * Memory, register.
7257 */
7258 if (enmEffOpSize == IEMMODE_64BIT)
7259 {
7260 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
7261 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7263 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7264 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7265 IEM_MC_ARG(uint64_t, u64Enc, 2);
7266 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7267 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7268 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7269 IEM_MC_END();
7270 }
7271 else
7272 {
7273 IEM_MC_BEGIN(3, 0, 0, 0);
7274 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7276 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7277 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7278 IEM_MC_ARG(uint32_t, u32Enc, 2);
7279 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7280 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7281 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7282 IEM_MC_END();
7283 }
7284 }
7285}
7286#else
7287FNIEMOP_UD_STUB(iemOp_vmread_Ey_Gy);
7288#endif
7289
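/*
 * Note: in long mode VMREAD/VMWRITE always operate on 64-bit quantities and
 * otherwise on 32-bit ones; there is no 16-bit form.  That is why
 * enmEffOpSize above is derived from the CPU mode rather than from the
 * operand size prefix.
 */
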
7290/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7291FNIEMOP_STUB(iemOp_AmdGrp17);
7292/* Opcode 0xf3 0x0f 0x78 - invalid */
7293/* Opcode 0xf2 0x0f 0x78 - invalid */
7294
7295/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7296#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7297FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7298{
7299 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7300 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7301 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7302 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7303
7304 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7305 if (IEM_IS_MODRM_REG_MODE(bRm))
7306 {
7307 /*
7308 * Register, register.
7309 */
7310 if (enmEffOpSize == IEMMODE_64BIT)
7311 {
7312 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
7313 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7314 IEM_MC_ARG(uint64_t, u64Val, 0);
7315 IEM_MC_ARG(uint64_t, u64Enc, 1);
7316 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7317 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7318 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
7319 IEM_MC_END();
7320 }
7321 else
7322 {
7323 IEM_MC_BEGIN(2, 0, 0, 0);
7324 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7325 IEM_MC_ARG(uint32_t, u32Val, 0);
7326 IEM_MC_ARG(uint32_t, u32Enc, 1);
7327 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7328 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7329 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
7330 IEM_MC_END();
7331 }
7332 }
7333 else
7334 {
7335 /*
7336 * Register, memory.
7337 */
7338 if (enmEffOpSize == IEMMODE_64BIT)
7339 {
7340 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
7341 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7343 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7344 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7345 IEM_MC_ARG(uint64_t, u64Enc, 2);
7346 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7347 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7348 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7349 IEM_MC_END();
7350 }
7351 else
7352 {
7353 IEM_MC_BEGIN(3, 0, 0, 0);
7354 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7356 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7357 IEM_MC_ARG(uint32_t, u32Enc, 2);
7358 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7359 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7360 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7361 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7362 IEM_MC_END();
7363 }
7364 }
7365}
7366#else
7367FNIEMOP_UD_STUB(iemOp_vmwrite_Gy_Ey);
7368#endif
7369/* Opcode 0x66 0x0f 0x79 - invalid */
7370/* Opcode 0xf3 0x0f 0x79 - invalid */
7371/* Opcode 0xf2 0x0f 0x79 - invalid */
7372
7373/* Opcode 0x0f 0x7a - invalid */
7374/* Opcode 0x66 0x0f 0x7a - invalid */
7375/* Opcode 0xf3 0x0f 0x7a - invalid */
7376/* Opcode 0xf2 0x0f 0x7a - invalid */
7377
7378/* Opcode 0x0f 0x7b - invalid */
7379/* Opcode 0x66 0x0f 0x7b - invalid */
7380/* Opcode 0xf3 0x0f 0x7b - invalid */
7381/* Opcode 0xf2 0x0f 0x7b - invalid */
7382
7383/* Opcode 0x0f 0x7c - invalid */
7384
7385
7386/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7387FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7388{
7389 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7390 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7391}
7392
7393
7394/* Opcode 0xf3 0x0f 0x7c - invalid */
7395
7396
7397/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7398FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7399{
7400 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7401 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7402}
7403
7404
7405/* Opcode 0x0f 0x7d - invalid */
7406
7407
7408/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7409FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7410{
7411 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7412 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7413}
7414
7415
7416/* Opcode 0xf3 0x0f 0x7d - invalid */
7417
7418
7419/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7420FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7421{
7422 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7423 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7424}
7425
7426
7427/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7428FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7429{
7430 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7431 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7432 {
7433 /**
7434 * @opcode 0x7e
7435 * @opcodesub rex.w=1
7436 * @oppfx none
7437 * @opcpuid mmx
7438 * @opgroup og_mmx_datamove
7439 * @opxcpttype 5
7440 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7441 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7442 */
7443 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7444 if (IEM_IS_MODRM_REG_MODE(bRm))
7445 {
7446 /* greg64, MMX */
7447 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7449 IEM_MC_LOCAL(uint64_t, u64Tmp);
7450
7451 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7452 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7453 IEM_MC_FPU_TO_MMX_MODE();
7454
7455 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7456 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7457
7458 IEM_MC_ADVANCE_RIP_AND_FINISH();
7459 IEM_MC_END();
7460 }
7461 else
7462 {
7463 /* [mem64], MMX */
7464 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
7465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7466 IEM_MC_LOCAL(uint64_t, u64Tmp);
7467
7468 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7470 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7471 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7472
7473 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7474 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7475 IEM_MC_FPU_TO_MMX_MODE();
7476
7477 IEM_MC_ADVANCE_RIP_AND_FINISH();
7478 IEM_MC_END();
7479 }
7480 }
7481 else
7482 {
7483 /**
7484 * @opdone
7485 * @opcode 0x7e
7486 * @opcodesub rex.w=0
7487 * @oppfx none
7488 * @opcpuid mmx
7489 * @opgroup og_mmx_datamove
7490 * @opxcpttype 5
7491 * @opfunction iemOp_movd_q_Ey_Pd
7492 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7493 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7494 */
7495 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7496 if (IEM_IS_MODRM_REG_MODE(bRm))
7497 {
7498 /* greg32, MMX */
7499 IEM_MC_BEGIN(0, 1, 0, 0);
7500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7501 IEM_MC_LOCAL(uint32_t, u32Tmp);
7502
7503 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7504 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7505 IEM_MC_FPU_TO_MMX_MODE();
7506
7507 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7508 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7509
7510 IEM_MC_ADVANCE_RIP_AND_FINISH();
7511 IEM_MC_END();
7512 }
7513 else
7514 {
7515 /* [mem32], MMX */
7516 IEM_MC_BEGIN(0, 2, 0, 0);
7517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7518 IEM_MC_LOCAL(uint32_t, u32Tmp);
7519
7520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7522 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7523 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7524
7525 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7526 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7527 IEM_MC_FPU_TO_MMX_MODE();
7528
7529 IEM_MC_ADVANCE_RIP_AND_FINISH();
7530 IEM_MC_END();
7531 }
7532 }
7533}
7534
7535
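/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */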
7536FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7537{
7538 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7539 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7540 {
7541 /**
7542 * @opcode 0x7e
7543 * @opcodesub rex.w=1
7544 * @oppfx 0x66
7545 * @opcpuid sse2
7546 * @opgroup og_sse2_simdint_datamove
7547 * @opxcpttype 5
7548 * @optest 64-bit / op1=1 op2=2 -> op1=2
7549 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7550 */
7551 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7552 if (IEM_IS_MODRM_REG_MODE(bRm))
7553 {
7554 /* greg64, XMM */
7555 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7557 IEM_MC_LOCAL(uint64_t, u64Tmp);
7558
7559 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7560 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7561
7562 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7563 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7564
7565 IEM_MC_ADVANCE_RIP_AND_FINISH();
7566 IEM_MC_END();
7567 }
7568 else
7569 {
7570 /* [mem64], XMM */
7571 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
7572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7573 IEM_MC_LOCAL(uint64_t, u64Tmp);
7574
7575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7577 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7578 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7579
7580 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7581 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7582
7583 IEM_MC_ADVANCE_RIP_AND_FINISH();
7584 IEM_MC_END();
7585 }
7586 }
7587 else
7588 {
7589 /**
7590 * @opdone
7591 * @opcode 0x7e
7592 * @opcodesub rex.w=0
7593 * @oppfx 0x66
7594 * @opcpuid sse2
7595 * @opgroup og_sse2_simdint_datamove
7596 * @opxcpttype 5
7597 * @opfunction iemOp_movd_q_Ey_Vy
7598 * @optest op1=1 op2=2 -> op1=2
7599 * @optest op1=0 op2=-42 -> op1=-42
7600 */
7601 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7602 if (IEM_IS_MODRM_REG_MODE(bRm))
7603 {
7604 /* greg32, XMM */
7605 IEM_MC_BEGIN(0, 1, 0, 0);
7606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7607 IEM_MC_LOCAL(uint32_t, u32Tmp);
7608
7609 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7610 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7611
7612 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7613 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7614
7615 IEM_MC_ADVANCE_RIP_AND_FINISH();
7616 IEM_MC_END();
7617 }
7618 else
7619 {
7620 /* [mem32], XMM */
7621 IEM_MC_BEGIN(0, 2, 0, 0);
7622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7623 IEM_MC_LOCAL(uint32_t, u32Tmp);
7624
7625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7627 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7628 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7629
7630 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7631 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7632
7633 IEM_MC_ADVANCE_RIP_AND_FINISH();
7634 IEM_MC_END();
7635 }
7636 }
7637}
7638
7639/**
7640 * @opcode 0x7e
7641 * @oppfx 0xf3
7642 * @opcpuid sse2
7643 * @opgroup og_sse2_pcksclr_datamove
7644 * @opxcpttype none
7645 * @optest op1=1 op2=2 -> op1=2
7646 * @optest op1=0 op2=-42 -> op1=-42
7647 */
7648FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7649{
7650 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7651 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7652 if (IEM_IS_MODRM_REG_MODE(bRm))
7653 {
7654 /*
7655 * XMM128, XMM64.
7656 */
7657 IEM_MC_BEGIN(0, 2, 0, 0);
7658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7659 IEM_MC_LOCAL(uint64_t, uSrc);
7660
7661 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7662 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7663
7664 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7665 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7666
7667 IEM_MC_ADVANCE_RIP_AND_FINISH();
7668 IEM_MC_END();
7669 }
7670 else
7671 {
7672 /*
7673 * XMM128, [mem64].
7674 */
7675 IEM_MC_BEGIN(0, 2, 0, 0);
7676 IEM_MC_LOCAL(uint64_t, uSrc);
7677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7678
7679 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7681 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7682 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7683
7684 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7685 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7686
7687 IEM_MC_ADVANCE_RIP_AND_FINISH();
7688 IEM_MC_END();
7689 }
7690}
7691
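/*
 * Note the zero-extending store above: the f3-prefixed movq always clears
 * the destination's high quadword, unlike the MMX register form.
 * Illustrative effect, listing the low qword first:
 *
 *      xmm1 = { 0x1111, 0x2222 }, xmm2 = { 0xaaaa, 0xbbbb }
 *      movq xmm1, xmm2  ->  xmm1 = { 0xaaaa, 0x0000 }
 */
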
7692/* Opcode 0xf2 0x0f 0x7e - invalid */
7693
7694
7695/** Opcode 0x0f 0x7f - movq Qq, Pq */
7696FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7697{
7698 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7699 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7700 if (IEM_IS_MODRM_REG_MODE(bRm))
7701 {
7702 /*
7703 * MMX, MMX.
7704 */
7705 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7706 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7707 IEM_MC_BEGIN(0, 1, 0, 0);
7708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7709 IEM_MC_LOCAL(uint64_t, u64Tmp);
7710 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7711 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7712 IEM_MC_FPU_TO_MMX_MODE();
7713
7714 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7715 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7716
7717 IEM_MC_ADVANCE_RIP_AND_FINISH();
7718 IEM_MC_END();
7719 }
7720 else
7721 {
7722 /*
7723 * [mem64], MMX.
7724 */
7725 IEM_MC_BEGIN(0, 2, 0, 0);
7726 IEM_MC_LOCAL(uint64_t, u64Tmp);
7727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7728
7729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7731 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7732 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7733
7734 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7735 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7736 IEM_MC_FPU_TO_MMX_MODE();
7737
7738 IEM_MC_ADVANCE_RIP_AND_FINISH();
7739 IEM_MC_END();
7740 }
7741}
7742
7743/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7744FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7745{
7746 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7747 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7748 if (IEM_IS_MODRM_REG_MODE(bRm))
7749 {
7750 /*
7751 * XMM, XMM.
7752 */
7753 IEM_MC_BEGIN(0, 0, 0, 0);
7754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7755 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7756 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7757 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7758 IEM_GET_MODRM_REG(pVCpu, bRm));
7759 IEM_MC_ADVANCE_RIP_AND_FINISH();
7760 IEM_MC_END();
7761 }
7762 else
7763 {
7764 /*
7765 * [mem128], XMM.
7766 */
7767 IEM_MC_BEGIN(0, 2, 0, 0);
7768 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7769 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7770
7771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7773 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7774 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7775
7776 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7777 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7778
7779 IEM_MC_ADVANCE_RIP_AND_FINISH();
7780 IEM_MC_END();
7781 }
7782}
7783
7784/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7785FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7786{
7787 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7789 if (IEM_IS_MODRM_REG_MODE(bRm))
7790 {
7791 /*
7792 * XMM, XMM.
7793 */
7794 IEM_MC_BEGIN(0, 0, 0, 0);
7795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7796 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7797 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7798 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7799 IEM_GET_MODRM_REG(pVCpu, bRm));
7800 IEM_MC_ADVANCE_RIP_AND_FINISH();
7801 IEM_MC_END();
7802 }
7803 else
7804 {
7805 /*
7806 * [mem128], XMM.
7807 */
7808 IEM_MC_BEGIN(0, 2, 0, 0);
7809 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7811
7812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7814 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7815 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7816
7817 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7818 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7819
7820 IEM_MC_ADVANCE_RIP_AND_FINISH();
7821 IEM_MC_END();
7822 }
7823}
7824
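/*
 * The only difference between the movdqa and movdqu workers above is the
 * memory store: movdqa uses the aligned variant, where a misaligned 16-byte
 * access raises #GP(0), while movdqu uses the no-alignment-check one.
 */
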
7825/* Opcode 0xf2 0x0f 0x7f - invalid */
7826
7827
7828/**
7829 * @opcode 0x80
7830 * @opfltest of
7831 */
7832FNIEMOP_DEF(iemOp_jo_Jv)
7833{
7834 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7835 IEMOP_HLP_MIN_386();
7836 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7837 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7838 {
7839 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7840 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7842 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7843 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7844 } IEM_MC_ELSE() {
7845 IEM_MC_ADVANCE_RIP_AND_FINISH();
7846 } IEM_MC_ENDIF();
7847 IEM_MC_END();
7848 }
7849 else
7850 {
7851 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7852 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7854 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7855 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7856 } IEM_MC_ELSE() {
7857 IEM_MC_ADVANCE_RIP_AND_FINISH();
7858 } IEM_MC_ENDIF();
7859 IEM_MC_END();
7860 }
7861}
7862
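/*
 * All the 0f 8x Jcc forms below share this shape: fetch a signed 16- or
 * 32-bit displacement (Jv), test the EFLAGS combination given in the
 * @opfltest tag, then either take the relative jump or just advance RIP.
 * For a taken branch the new instruction pointer is roughly (names here are
 * illustrative):
 *
 *      uRipNext = uRip + cbInstr + (int64_t)iImm
 *
 * In 64-bit mode the operand size defaults to 64 bits (the displacement
 * stays 32 bits, sign-extended) and Intel CPUs ignore the operand size
 * prefix, as per IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX.
 */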
7863
7864/**
7865 * @opcode 0x81
7866 * @opfltest of
7867 */
7868FNIEMOP_DEF(iemOp_jno_Jv)
7869{
7870 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7871 IEMOP_HLP_MIN_386();
7872 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7873 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7874 {
7875 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7876 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7878 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7879 IEM_MC_ADVANCE_RIP_AND_FINISH();
7880 } IEM_MC_ELSE() {
7881 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7882 } IEM_MC_ENDIF();
7883 IEM_MC_END();
7884 }
7885 else
7886 {
7887 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7888 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7890 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7891 IEM_MC_ADVANCE_RIP_AND_FINISH();
7892 } IEM_MC_ELSE() {
7893 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7894 } IEM_MC_ENDIF();
7895 IEM_MC_END();
7896 }
7897}
7898
7899
7900/**
7901 * @opcode 0x82
7902 * @opfltest cf
7903 */
7904FNIEMOP_DEF(iemOp_jc_Jv)
7905{
7906 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7907 IEMOP_HLP_MIN_386();
7908 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7909 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7910 {
7911 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7912 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7914 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7915 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7916 } IEM_MC_ELSE() {
7917 IEM_MC_ADVANCE_RIP_AND_FINISH();
7918 } IEM_MC_ENDIF();
7919 IEM_MC_END();
7920 }
7921 else
7922 {
7923 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7924 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7926 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7927 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7928 } IEM_MC_ELSE() {
7929 IEM_MC_ADVANCE_RIP_AND_FINISH();
7930 } IEM_MC_ENDIF();
7931 IEM_MC_END();
7932 }
7933}
7934
7935
7936/**
7937 * @opcode 0x83
7938 * @opfltest cf
7939 */
7940FNIEMOP_DEF(iemOp_jnc_Jv)
7941{
7942 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7943 IEMOP_HLP_MIN_386();
7944 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7945 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7946 {
7947 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7948 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7950 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7951 IEM_MC_ADVANCE_RIP_AND_FINISH();
7952 } IEM_MC_ELSE() {
7953 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7954 } IEM_MC_ENDIF();
7955 IEM_MC_END();
7956 }
7957 else
7958 {
7959 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7960 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7962 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7963 IEM_MC_ADVANCE_RIP_AND_FINISH();
7964 } IEM_MC_ELSE() {
7965 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7966 } IEM_MC_ENDIF();
7967 IEM_MC_END();
7968 }
7969}
7970
7971
7972/**
7973 * @opcode 0x84
7974 * @opfltest zf
7975 */
7976FNIEMOP_DEF(iemOp_je_Jv)
7977{
7978 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7979 IEMOP_HLP_MIN_386();
7980 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7981 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7982 {
7983 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7984 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7986 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7987 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7988 } IEM_MC_ELSE() {
7989 IEM_MC_ADVANCE_RIP_AND_FINISH();
7990 } IEM_MC_ENDIF();
7991 IEM_MC_END();
7992 }
7993 else
7994 {
7995 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7996 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7998 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7999 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8000 } IEM_MC_ELSE() {
8001 IEM_MC_ADVANCE_RIP_AND_FINISH();
8002 } IEM_MC_ENDIF();
8003 IEM_MC_END();
8004 }
8005}
8006
8007
8008/**
8009 * @opcode 0x85
8010 * @opfltest zf
8011 */
8012FNIEMOP_DEF(iemOp_jne_Jv)
8013{
8014 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
8015 IEMOP_HLP_MIN_386();
8016 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8017 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8018 {
8019 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8020 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8022 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8023 IEM_MC_ADVANCE_RIP_AND_FINISH();
8024 } IEM_MC_ELSE() {
8025 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8026 } IEM_MC_ENDIF();
8027 IEM_MC_END();
8028 }
8029 else
8030 {
8031 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8032 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8034 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8035 IEM_MC_ADVANCE_RIP_AND_FINISH();
8036 } IEM_MC_ELSE() {
8037 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8038 } IEM_MC_ENDIF();
8039 IEM_MC_END();
8040 }
8041}
8042
8043
8044/**
8045 * @opcode 0x86
8046 * @opfltest cf,zf
8047 */
8048FNIEMOP_DEF(iemOp_jbe_Jv)
8049{
8050 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
8051 IEMOP_HLP_MIN_386();
8052 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8053 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8054 {
8055 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8056 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8058 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8059 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8060 } IEM_MC_ELSE() {
8061 IEM_MC_ADVANCE_RIP_AND_FINISH();
8062 } IEM_MC_ENDIF();
8063 IEM_MC_END();
8064 }
8065 else
8066 {
8067 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8068 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8070 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8071 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8072 } IEM_MC_ELSE() {
8073 IEM_MC_ADVANCE_RIP_AND_FINISH();
8074 } IEM_MC_ENDIF();
8075 IEM_MC_END();
8076 }
8077}
8078
8079
8080/**
8081 * @opcode 0x87
8082 * @opfltest cf,zf
8083 */
8084FNIEMOP_DEF(iemOp_jnbe_Jv)
8085{
8086 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
8087 IEMOP_HLP_MIN_386();
8088 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8089 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8090 {
8091 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8092 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8094 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8095 IEM_MC_ADVANCE_RIP_AND_FINISH();
8096 } IEM_MC_ELSE() {
8097 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8098 } IEM_MC_ENDIF();
8099 IEM_MC_END();
8100 }
8101 else
8102 {
8103 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8104 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8106 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8107 IEM_MC_ADVANCE_RIP_AND_FINISH();
8108 } IEM_MC_ELSE() {
8109 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8110 } IEM_MC_ENDIF();
8111 IEM_MC_END();
8112 }
8113}
8114
8115
8116/**
8117 * @opcode 0x88
8118 * @opfltest sf
8119 */
8120FNIEMOP_DEF(iemOp_js_Jv)
8121{
8122 IEMOP_MNEMONIC(js_Jv, "js Jv");
8123 IEMOP_HLP_MIN_386();
8124 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8125 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8126 {
8127 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8128 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8130 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8131 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8132 } IEM_MC_ELSE() {
8133 IEM_MC_ADVANCE_RIP_AND_FINISH();
8134 } IEM_MC_ENDIF();
8135 IEM_MC_END();
8136 }
8137 else
8138 {
8139 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8140 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8142 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8143 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8144 } IEM_MC_ELSE() {
8145 IEM_MC_ADVANCE_RIP_AND_FINISH();
8146 } IEM_MC_ENDIF();
8147 IEM_MC_END();
8148 }
8149}
8150
8151
8152/**
8153 * @opcode 0x89
8154 * @opfltest sf
8155 */
8156FNIEMOP_DEF(iemOp_jns_Jv)
8157{
8158 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
8159 IEMOP_HLP_MIN_386();
8160 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8161 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8162 {
8163 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8164 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8166 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8167 IEM_MC_ADVANCE_RIP_AND_FINISH();
8168 } IEM_MC_ELSE() {
8169 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8170 } IEM_MC_ENDIF();
8171 IEM_MC_END();
8172 }
8173 else
8174 {
8175 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8176 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8178 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8179 IEM_MC_ADVANCE_RIP_AND_FINISH();
8180 } IEM_MC_ELSE() {
8181 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8182 } IEM_MC_ENDIF();
8183 IEM_MC_END();
8184 }
8185}
8186
8187
8188/**
8189 * @opcode 0x8a
8190 * @opfltest pf
8191 */
8192FNIEMOP_DEF(iemOp_jp_Jv)
8193{
8194 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
8195 IEMOP_HLP_MIN_386();
8196 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8197 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8198 {
8199 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8200 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8202 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8203 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8204 } IEM_MC_ELSE() {
8205 IEM_MC_ADVANCE_RIP_AND_FINISH();
8206 } IEM_MC_ENDIF();
8207 IEM_MC_END();
8208 }
8209 else
8210 {
8211 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8212 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8214 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8215 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8216 } IEM_MC_ELSE() {
8217 IEM_MC_ADVANCE_RIP_AND_FINISH();
8218 } IEM_MC_ENDIF();
8219 IEM_MC_END();
8220 }
8221}
8222
8223
8224/**
8225 * @opcode 0x8b
8226 * @opfltest pf
8227 */
8228FNIEMOP_DEF(iemOp_jnp_Jv)
8229{
8230 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
8231 IEMOP_HLP_MIN_386();
8232 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8233 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8234 {
8235 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8236 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8238 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8239 IEM_MC_ADVANCE_RIP_AND_FINISH();
8240 } IEM_MC_ELSE() {
8241 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8242 } IEM_MC_ENDIF();
8243 IEM_MC_END();
8244 }
8245 else
8246 {
8247 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8248 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8250 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8251 IEM_MC_ADVANCE_RIP_AND_FINISH();
8252 } IEM_MC_ELSE() {
8253 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8254 } IEM_MC_ENDIF();
8255 IEM_MC_END();
8256 }
8257}
8258
8259
8260/**
8261 * @opcode 0x8c
8262 * @opfltest sf,of
8263 */
8264FNIEMOP_DEF(iemOp_jl_Jv)
8265{
8266 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8267 IEMOP_HLP_MIN_386();
8268 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8269 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8270 {
8271 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8272 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8274 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8275 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8276 } IEM_MC_ELSE() {
8277 IEM_MC_ADVANCE_RIP_AND_FINISH();
8278 } IEM_MC_ENDIF();
8279 IEM_MC_END();
8280 }
8281 else
8282 {
8283 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8284 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8286 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8287 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8288 } IEM_MC_ELSE() {
8289 IEM_MC_ADVANCE_RIP_AND_FINISH();
8290 } IEM_MC_ENDIF();
8291 IEM_MC_END();
8292 }
8293}
8294
8295
8296/**
8297 * @opcode 0x8d
8298 * @opfltest sf,of
8299 */
8300FNIEMOP_DEF(iemOp_jnl_Jv)
8301{
8302 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8303 IEMOP_HLP_MIN_386();
8304 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8305 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8306 {
8307 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8308 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8310 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8311 IEM_MC_ADVANCE_RIP_AND_FINISH();
8312 } IEM_MC_ELSE() {
8313 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8314 } IEM_MC_ENDIF();
8315 IEM_MC_END();
8316 }
8317 else
8318 {
8319 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8320 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8322 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8323 IEM_MC_ADVANCE_RIP_AND_FINISH();
8324 } IEM_MC_ELSE() {
8325 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8326 } IEM_MC_ENDIF();
8327 IEM_MC_END();
8328 }
8329}
8330
8331
8332/**
8333 * @opcode 0x8e
8334 * @opfltest zf,sf,of
8335 */
8336FNIEMOP_DEF(iemOp_jle_Jv)
8337{
8338 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8339 IEMOP_HLP_MIN_386();
8340 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8341 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8342 {
8343 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8344 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8346 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8347 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8348 } IEM_MC_ELSE() {
8349 IEM_MC_ADVANCE_RIP_AND_FINISH();
8350 } IEM_MC_ENDIF();
8351 IEM_MC_END();
8352 }
8353 else
8354 {
8355 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8356 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8358 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8359 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8360 } IEM_MC_ELSE() {
8361 IEM_MC_ADVANCE_RIP_AND_FINISH();
8362 } IEM_MC_ENDIF();
8363 IEM_MC_END();
8364 }
8365}
8366
8367
8368/**
8369 * @opcode 0x8f
8370 * @opfltest zf,sf,of
8371 */
8372FNIEMOP_DEF(iemOp_jnle_Jv)
8373{
8374 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8375 IEMOP_HLP_MIN_386();
8376 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8377 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8378 {
8379 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8380 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8382 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8383 IEM_MC_ADVANCE_RIP_AND_FINISH();
8384 } IEM_MC_ELSE() {
8385 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8386 } IEM_MC_ENDIF();
8387 IEM_MC_END();
8388 }
8389 else
8390 {
8391 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8392 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8394 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8395 IEM_MC_ADVANCE_RIP_AND_FINISH();
8396 } IEM_MC_ELSE() {
8397 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8398 } IEM_MC_ENDIF();
8399 IEM_MC_END();
8400 }
8401}
8402
8403
8404/**
8405 * @opcode 0x90
8406 * @opfltest of
8407 */
8408FNIEMOP_DEF(iemOp_seto_Eb)
8409{
8410 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8411 IEMOP_HLP_MIN_386();
8412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8413
8414 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8415 * any way. AMD says it's "unused", whatever that means. We're
8416 * ignoring it for now. */
8417 if (IEM_IS_MODRM_REG_MODE(bRm))
8418 {
8419 /* register target */
8420 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8422 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8423 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8424 } IEM_MC_ELSE() {
8425 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8426 } IEM_MC_ENDIF();
8427 IEM_MC_ADVANCE_RIP_AND_FINISH();
8428 IEM_MC_END();
8429 }
8430 else
8431 {
8432 /* memory target */
8433 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8437 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8438 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8439 } IEM_MC_ELSE() {
8440 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8441 } IEM_MC_ENDIF();
8442 IEM_MC_ADVANCE_RIP_AND_FINISH();
8443 IEM_MC_END();
8444 }
8445}
8446
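/*
 * All the 0f 9x setcc forms below reduce to the same pattern, sketched here
 * in plain C (fConditionHolds is an illustrative name):
 *
 *      uint8_t const bResult = fConditionHolds(fEFlags) ? 1 : 0;
 *      ... store bResult to the ModR/M r/m target (register or memory) ...
 *
 * Only the tested EFLAGS combination differs between them; the ModR/M reg
 * field plays no part in operand selection (see the encoding @todo above).
 */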
8447
8448/**
8449 * @opcode 0x91
8450 * @opfltest of
8451 */
8452FNIEMOP_DEF(iemOp_setno_Eb)
8453{
8454 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8455 IEMOP_HLP_MIN_386();
8456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8457
8458 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8459 * any way. AMD says it's "unused", whatever that means. We're
8460 * ignoring it for now. */
8461 if (IEM_IS_MODRM_REG_MODE(bRm))
8462 {
8463 /* register target */
8464 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8466 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8467 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8468 } IEM_MC_ELSE() {
8469 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8470 } IEM_MC_ENDIF();
8471 IEM_MC_ADVANCE_RIP_AND_FINISH();
8472 IEM_MC_END();
8473 }
8474 else
8475 {
8476 /* memory target */
8477 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8481 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8482 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8483 } IEM_MC_ELSE() {
8484 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8485 } IEM_MC_ENDIF();
8486 IEM_MC_ADVANCE_RIP_AND_FINISH();
8487 IEM_MC_END();
8488 }
8489}
8490
8491
8492/**
8493 * @opcode 0x92
8494 * @opfltest cf
8495 */
8496FNIEMOP_DEF(iemOp_setc_Eb)
8497{
8498 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8499 IEMOP_HLP_MIN_386();
8500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8501
8502 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8503 * any way. AMD says it's "unused", whatever that means. We're
8504 * ignoring it for now. */
8505 if (IEM_IS_MODRM_REG_MODE(bRm))
8506 {
8507 /* register target */
8508 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8510 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8511 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8512 } IEM_MC_ELSE() {
8513 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8514 } IEM_MC_ENDIF();
8515 IEM_MC_ADVANCE_RIP_AND_FINISH();
8516 IEM_MC_END();
8517 }
8518 else
8519 {
8520 /* memory target */
8521 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8525 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8526 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8527 } IEM_MC_ELSE() {
8528 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8529 } IEM_MC_ENDIF();
8530 IEM_MC_ADVANCE_RIP_AND_FINISH();
8531 IEM_MC_END();
8532 }
8533}
8534
8535
8536/**
8537 * @opcode 0x93
8538 * @opfltest cf
8539 */
8540FNIEMOP_DEF(iemOp_setnc_Eb)
8541{
8542 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8543 IEMOP_HLP_MIN_386();
8544 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8545
8546 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8547 * any way. AMD says it's "unused", whatever that means. We're
8548 * ignoring it for now. */
8549 if (IEM_IS_MODRM_REG_MODE(bRm))
8550 {
8551 /* register target */
8552 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8554 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8555 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8556 } IEM_MC_ELSE() {
8557 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8558 } IEM_MC_ENDIF();
8559 IEM_MC_ADVANCE_RIP_AND_FINISH();
8560 IEM_MC_END();
8561 }
8562 else
8563 {
8564 /* memory target */
8565 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8569 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8570 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8571 } IEM_MC_ELSE() {
8572 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8573 } IEM_MC_ENDIF();
8574 IEM_MC_ADVANCE_RIP_AND_FINISH();
8575 IEM_MC_END();
8576 }
8577}
8578
8579
8580/**
8581 * @opcode 0x94
8582 * @opfltest zf
8583 */
8584FNIEMOP_DEF(iemOp_sete_Eb)
8585{
8586 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8587 IEMOP_HLP_MIN_386();
8588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8589
8590 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8591 * any way. AMD says it's "unused", whatever that means. We're
8592 * ignoring it for now. */
8593 if (IEM_IS_MODRM_REG_MODE(bRm))
8594 {
8595 /* register target */
8596 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8598 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8599 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8600 } IEM_MC_ELSE() {
8601 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8602 } IEM_MC_ENDIF();
8603 IEM_MC_ADVANCE_RIP_AND_FINISH();
8604 IEM_MC_END();
8605 }
8606 else
8607 {
8608 /* memory target */
8609 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8613 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8614 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8615 } IEM_MC_ELSE() {
8616 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8617 } IEM_MC_ENDIF();
8618 IEM_MC_ADVANCE_RIP_AND_FINISH();
8619 IEM_MC_END();
8620 }
8621}
8622
8623
8624/**
8625 * @opcode 0x95
8626 * @opfltest zf
8627 */
8628FNIEMOP_DEF(iemOp_setne_Eb)
8629{
8630 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8631 IEMOP_HLP_MIN_386();
8632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8633
8634 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8635 * any way. AMD says it's "unused", whatever that means. We're
8636 * ignoring it for now. */
8637 if (IEM_IS_MODRM_REG_MODE(bRm))
8638 {
8639 /* register target */
8640 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8642 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8643 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8644 } IEM_MC_ELSE() {
8645 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8646 } IEM_MC_ENDIF();
8647 IEM_MC_ADVANCE_RIP_AND_FINISH();
8648 IEM_MC_END();
8649 }
8650 else
8651 {
8652 /* memory target */
8653 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8657 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8658 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8659 } IEM_MC_ELSE() {
8660 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8661 } IEM_MC_ENDIF();
8662 IEM_MC_ADVANCE_RIP_AND_FINISH();
8663 IEM_MC_END();
8664 }
8665}
8666
8667
8668/**
8669 * @opcode 0x96
8670 * @opfltest cf,zf
8671 */
8672FNIEMOP_DEF(iemOp_setbe_Eb)
8673{
8674 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8675 IEMOP_HLP_MIN_386();
8676 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8677
8678 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8679 * any way. AMD says it's "unused", whatever that means. We're
8680 * ignoring it for now. */
8681 if (IEM_IS_MODRM_REG_MODE(bRm))
8682 {
8683 /* register target */
8684 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8686 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8687 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8688 } IEM_MC_ELSE() {
8689 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8690 } IEM_MC_ENDIF();
8691 IEM_MC_ADVANCE_RIP_AND_FINISH();
8692 IEM_MC_END();
8693 }
8694 else
8695 {
8696 /* memory target */
8697 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8701 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8702 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8703 } IEM_MC_ELSE() {
8704 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8705 } IEM_MC_ENDIF();
8706 IEM_MC_ADVANCE_RIP_AND_FINISH();
8707 IEM_MC_END();
8708 }
8709}
8710
8711
8712/**
8713 * @opcode 0x97
8714 * @opfltest cf,zf
8715 */
8716FNIEMOP_DEF(iemOp_setnbe_Eb)
8717{
8718 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8719 IEMOP_HLP_MIN_386();
8720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8721
8722 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8723 * any way. AMD says it's "unused", whatever that means. We're
8724 * ignoring it for now. */
8725 if (IEM_IS_MODRM_REG_MODE(bRm))
8726 {
8727 /* register target */
8728 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8730 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8731 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8732 } IEM_MC_ELSE() {
8733 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8734 } IEM_MC_ENDIF();
8735 IEM_MC_ADVANCE_RIP_AND_FINISH();
8736 IEM_MC_END();
8737 }
8738 else
8739 {
8740 /* memory target */
8741 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8745 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8746 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8747 } IEM_MC_ELSE() {
8748 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8749 } IEM_MC_ENDIF();
8750 IEM_MC_ADVANCE_RIP_AND_FINISH();
8751 IEM_MC_END();
8752 }
8753}
8754
8755
8756/**
8757 * @opcode 0x98
8758 * @opfltest sf
8759 */
8760FNIEMOP_DEF(iemOp_sets_Eb)
8761{
8762 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8763 IEMOP_HLP_MIN_386();
8764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8765
8766 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8767 * any way. AMD says it's "unused", whatever that means. We're
8768 * ignoring it for now. */
8769 if (IEM_IS_MODRM_REG_MODE(bRm))
8770 {
8771 /* register target */
8772 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8774 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8775 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8776 } IEM_MC_ELSE() {
8777 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8778 } IEM_MC_ENDIF();
8779 IEM_MC_ADVANCE_RIP_AND_FINISH();
8780 IEM_MC_END();
8781 }
8782 else
8783 {
8784 /* memory target */
8785 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8789 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8790 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8791 } IEM_MC_ELSE() {
8792 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8793 } IEM_MC_ENDIF();
8794 IEM_MC_ADVANCE_RIP_AND_FINISH();
8795 IEM_MC_END();
8796 }
8797}


/**
 * @opcode 0x99
 * @opfltest sf
 */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x9a
 * @opfltest pf
 */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x9b
 * @opfltest pf
 */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x9c
 * @opfltest sf,of
 */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x9d
 * @opfltest sf,of
 */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x9e
 * @opfltest zf,sf,of
 */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x9f
 * @opfltest zf,sf,of
 */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring it for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}


/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
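    /* The mask below names the guest (shadow) registers iemCImpl_pop_Sreg may
       dirty and which therefore need flushing: rSP plus the FS selector, base,
       limit and attributes (our reading of the kIemNativeGstReg_* flags). */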
    IEM_MC_DEFER_TO_CIMPL_2_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
                                iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* Not all 486s have CPUID. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
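    /* CPUID clobbers EAX, EBX, ECX and EDX, hence the four GPR flush bits; the
       VMEXIT flag accounts for CPUID intercepts under VT-x/AMD-V. */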
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
                                iemCImpl_cpuid);
}


/**
 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 */

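/*
 * Note on the memory forms below: the bit offset in Gv is signed and may
 * address bits outside the operand at the effective address, so the address
 * is adjusted by whole operand units before mapping. Worked example (16-bit):
 * bt [mem], ax with AX=35 gives i16AddrAdj = (35 >> 4) << 1 = +4 bytes, and
 * the AND with 0x0f leaves bit 35 & 15 = 3 of the word at mem+4 to be tested.
 */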
#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 5, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0

/* Read-only version (bt). The destination is never written, so a LOCK prefix
   is rejected with #UD in the memory form below. */
#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
                IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
                IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
                IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0


/**
 * @opcode 0xa3
 * @oppfx n/a
 * @opflclass bitmap
 */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
}


/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 */
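/*
 * Note: the memory forms pass 1 as the third IEM_MC_CALC_RM_EFF_ADDR argument
 * because an immediate byte still follows the ModRM bytes; our reading is that
 * this matters for RIP-relative addressing in 64-bit mode.
 */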
#define IEMOP_BODY_SHLD_SHR_Ib(a_pImplExpr) \
    PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
    \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/ cShift, 2); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0


/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 */
#define IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(a_pImplExpr) \
    PCIEMOPSHIFTDBLSIZES const pImpl = (a_pImplExpr); \
    \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint8_t, cShiftArg, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint8_t, cShiftArg, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint8_t, cShiftArg, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint8_t, cShiftArg, 2); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint8_t, cShiftArg, 2); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint8_t, cShiftArg, 2); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0


/**
 * @opcode 0xa4
 * @opflclass shift_count
 */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}


/**
 * @opcode 0xa5
 * @opflclass shift_count
 */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}


/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}


/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
                                iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xaa. */
FNIEMOP_DEF(iemOp_rsm)
{
    IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
    IEMOP_HLP_MIN_386(); /* 386SL and later. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
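    /* RSM resumes execution from whatever context was saved on SMM entry,
       hence the far-branch, mode, rflags and end-of-TB flags on the deferral
       (our reading of the IEM_CIMPL_F_* names). */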
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_rsm);
}


/**
 * @opcode 0xab
 * @oppfx n/a
 * @opflclass bitmap
 */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
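    /* The two body macros form a single statement: the _RW half handles the
       register and non-LOCK memory paths and ends in an open 'else' that the
       _LOCKED half completes for LOCK-prefixed memory operands. */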
    IEMOP_BODY_BIT_Ev_Gv_RW(iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
}


/**
 * @opcode 0xac
 * @opflclass shift_count
 */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_SHLD_SHR_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}


/**
 * @opcode 0xad
 * @opflclass shift_count
 */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_SHLD_SHRD_Ev_Gv_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}


/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}


/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
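    /* fxrstor reloads the x87 control and status words, so their shadow
       copies are flushed below. */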
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}


/**
 * @opmaps grp15
 * @opcode !11/2
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest op1=0 -> mxcsr=0
 * @optest op1=0x2083 -> mxcsr=0x2083
 * @optest op1=0xfffffffe -> value.xcpt=0xd
 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
}


/**
 * @opmaps grp15
 * @opcode !11/3
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_mxcsrsm
 * @opxcpttype 5
 * @optest mxcsr=0 -> op1=0
 * @optest mxcsr=0x2083 -> op1=0x2083
 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
}


/**
 * @opmaps grp15
 * @opcode !11/4
 * @oppfx none
 * @opcpuid xsave
 * @opgroup og_system
 * @opxcpttype none
 */
FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}


/**
 * @opmaps grp15
 * @opcode !11/5
 * @oppfx none
 * @opcpuid xsave
 * @opgroup og_system
 * @opxcpttype none
 */
FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}


/** Opcode 0x0f 0xae mem/6. */
FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);


/**
 * @opmaps grp15
 * @opcode !11/7
 * @oppfx none
 * @opcpuid clfsh
 * @opgroup og_cachectl
 * @optest op1=1 ->
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
}


/**
 * @opmaps grp15
 * @opcode !11/7
 * @oppfx 0x66
 * @opcpuid clflushopt
 * @opgroup og_cachectl
 * @optest op1=1 ->
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
}


/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
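    /* On non-ARM64 hosts without SSE2 the dedicated fence instruction is
       unavailable, so fall back to the alternative memory fence helper;
       the same pattern is used for mfence and sfence below. */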
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint32_t, u32Dst);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint32_t, u32Dst);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
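        /* Loading a non-canonical base address raises #GP(0). */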
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint32_t, u32Dst);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_LOCAL(uint32_t, u32Dst);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Group 15 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
{   /* pfx:  none,                066h,                 0f3h,                 0f2h */
    /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM,  iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM,  iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM,  iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM,  iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence,  iemOp_InvalidWithRM,  iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence,  iemOp_InvalidWithRM,  iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence,  iemOp_InvalidWithRM,  iemOp_InvalidWithRM,  iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);


/**
 * Group 15 jump table for memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
{   /* pfx:  none,                 066h,                   0f3h,                0f2h */
    /* /0 */ iemOp_Grp15_fxsave,   iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor,  iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr,  iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /3 */ iemOp_Grp15_stmxcsr,  iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave,    iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor,   iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM,    iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush,  iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);


/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10427 if (IEM_IS_MODRM_REG_MODE(bRm))
10428 /* register, register */
10429 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10430 + pVCpu->iem.s.idxPrefix], bRm);
10431 /* memory, register */
10432 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10433 + pVCpu->iem.s.idxPrefix], bRm);
10434}
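
/*
 * Illustrative sketch, assuming idxPrefix follows the table column order
 * none/066h/0f3h/0f2h = 0/1/2/3: for F3 0F AE /1 in register mode the reg
 * field is 1 and the F3 prefix selects column 2, so the dispatcher above
 * picks g_apfnGroup15RegReg[1*4 + 2], i.e. iemOp_Grp15_rdgsbase. The helper
 * name below is hypothetical.
 */
#if 0
static unsigned iemSketchGrp15TableSlot(uint8_t bRm, uint8_t idxPrefix)
{
    return ((bRm >> 3) & 7) * 4 + idxPrefix; /* four prefix columns per /r row */
}
#endif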
10435
10436
10437/**
10438 * @opcode 0xaf
10439 * @opflclass multiply
10440 */
10441FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10442{
10443 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10444 IEMOP_HLP_MIN_386();
10445 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10446 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10447 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10448 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_MIN_386, imul, 0);
10449}
10450
10451
10452/**
10453 * @opcode 0xb0
10454 * @opflclass arithmetic
10455 */
10456FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10457{
10458 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10459 IEMOP_HLP_MIN_486();
10460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10461
10462 if (IEM_IS_MODRM_REG_MODE(bRm))
10463 {
10464 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10466 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10467 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10468 IEM_MC_ARG(uint8_t, u8Src, 2);
10469 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10470
10471 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10472 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10473 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10474 IEM_MC_REF_EFLAGS(pEFlags);
10475 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10476
10477 IEM_MC_ADVANCE_RIP_AND_FINISH();
10478 IEM_MC_END();
10479 }
10480 else
10481 {
10482#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
10483 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0); \
10484 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10486 IEMOP_HLP_DONE_DECODING(); \
10487 \
10488 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10489 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10490 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10491 \
10492 IEM_MC_ARG(uint8_t, u8Src, 2); \
10493 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10494 \
10495 IEM_MC_LOCAL(uint8_t, u8Al); \
10496 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
10497 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
10498 \
10499 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10500 IEM_MC_FETCH_EFLAGS(EFlags); \
10501 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
10502 \
10503 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10504 IEM_MC_COMMIT_EFLAGS(EFlags); \
10505 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
10506 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10507 IEM_MC_END()
10508
10509 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10510 {
10511 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8,RW);
10512 }
10513 else
10514 {
10515 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked,ATOMIC);
10516 }
10517 }
10518}
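
/*
 * Illustrative sketch of the CMPXCHG semantics the worker implements; an
 * assumption for clarity, not the actual iemAImpl_cmpxchg_u8 source. The
 * worker updates the accumulator copy on mismatch, which is why the memory
 * path above unconditionally stores u8Al back into AL afterwards.
 */
#if 0
static void iemSketchCmpXchgU8(uint8_t *pu8Dst, uint8_t *pu8Al, uint8_t u8Src, uint32_t *pfEFlags)
{
    if (*pu8Al == *pu8Dst)
    {
        *pfEFlags |= X86_EFL_ZF;            /* other arithmetic flags set as for CMP */
        *pu8Dst = u8Src;                    /* equal: store the source operand */
    }
    else
    {
        *pfEFlags &= ~(uint32_t)X86_EFL_ZF;
        *pu8Al = *pu8Dst;                   /* not equal: accumulator gets the old value */
    }
}
#endif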
10519
10520/**
10521 * @opcode 0xb1
10522 * @opflclass arithmetic
10523 */
10524FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10525{
10526 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10527 IEMOP_HLP_MIN_486();
10528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10529
10530 if (IEM_IS_MODRM_REG_MODE(bRm))
10531 {
10532 switch (pVCpu->iem.s.enmEffOpSize)
10533 {
10534 case IEMMODE_16BIT:
10535 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10537 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10538 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10539 IEM_MC_ARG(uint16_t, u16Src, 2);
10540 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10541
10542 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10543 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10544 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10545 IEM_MC_REF_EFLAGS(pEFlags);
10546 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10547
10548 IEM_MC_ADVANCE_RIP_AND_FINISH();
10549 IEM_MC_END();
10550 break;
10551
10552 case IEMMODE_32BIT:
10553 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10555 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10556 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10557 IEM_MC_ARG(uint32_t, u32Src, 2);
10558 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10559
10560 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10561 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10562 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10563 IEM_MC_REF_EFLAGS(pEFlags);
10564 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10565
10566 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10567 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10568 } IEM_MC_ELSE() {
10569 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10570 } IEM_MC_ENDIF();
10571
10572 IEM_MC_ADVANCE_RIP_AND_FINISH();
10573 IEM_MC_END();
10574 break;
10575
10576 case IEMMODE_64BIT:
10577 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
10578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10579 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10580 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10581 IEM_MC_ARG(uint64_t, u64Src, 2);
10582 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10583
10584 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10585 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10586 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10587 IEM_MC_REF_EFLAGS(pEFlags);
10588 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10589
10590 IEM_MC_ADVANCE_RIP_AND_FINISH();
10591 IEM_MC_END();
10592 break;
10593
10594 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10595 }
10596 }
10597 else
10598 {
10599#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64,a_Type) \
10600 do { \
10601 switch (pVCpu->iem.s.enmEffOpSize) \
10602 { \
10603 case IEMMODE_16BIT: \
10604 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0); \
10605 \
10606 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10609 IEMOP_HLP_DONE_DECODING(); \
10610 \
10611 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10612 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10613 \
10614 IEM_MC_ARG(uint16_t, u16Src, 2); \
10615 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10616 \
10617 IEM_MC_LOCAL(uint16_t, u16Ax); \
10618 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
10619 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
10620 \
10621 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10622 IEM_MC_FETCH_EFLAGS(EFlags); \
10623 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
10624 \
10625 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10626 IEM_MC_COMMIT_EFLAGS(EFlags); \
10627 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
10628 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10629 IEM_MC_END(); \
10630 break; \
10631 \
10632 case IEMMODE_32BIT: \
10633 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0); \
10634 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10635 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10636 IEMOP_HLP_DONE_DECODING(); \
10637 \
10638 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10639 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10640 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10641 \
10642 IEM_MC_ARG(uint32_t, u32Src, 2); \
10643 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10644 \
10645 IEM_MC_LOCAL(uint32_t, u32Eax); \
10646 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
10647 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
10648 \
10649 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10650 IEM_MC_FETCH_EFLAGS(EFlags); \
10651 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
10652 \
10653 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10654 IEM_MC_COMMIT_EFLAGS(EFlags); \
10655 \
10656 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
10657 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
10658 } IEM_MC_ENDIF(); \
10659 \
10660 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10661 IEM_MC_END(); \
10662 break; \
10663 \
10664 case IEMMODE_64BIT: \
10665 IEM_MC_BEGIN(4, 4, IEM_MC_F_64BIT, 0); \
10666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10668 IEMOP_HLP_DONE_DECODING(); \
10669 \
10670 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10671 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10672 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10673 \
10674 IEM_MC_ARG(uint64_t, u64Src, 2); \
10675 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10676 \
10677 IEM_MC_LOCAL(uint64_t, u64Rax); \
10678 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
10679 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
10680 \
10681 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10682 IEM_MC_FETCH_EFLAGS(EFlags); \
10683 \
10684 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
10685 \
10686 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10687 IEM_MC_COMMIT_EFLAGS(EFlags); \
10688 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
10689 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10690 IEM_MC_END(); \
10691 break; \
10692 \
10693 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10694 } \
10695 } while (0)
10696
10697 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10698 {
10699 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64,RW);
10700 }
10701 else
10702 {
10703 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked,ATOMIC);
10704 }
10705 }
10706}
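
/*
 * Illustrative sketch, assuming the usual 64-bit mode rule that a 32-bit
 * register write zero-extends into the full 64-bit register: CMPXCHG writes
 * either the destination (ZF=1) or EAX (ZF=0), never both, hence the
 * ZF-conditional IEM_MC_CLEAR_HIGH_GREG_U64 pair in the register path above.
 * The helper name is hypothetical.
 */
#if 0
static void iemSketchCmpXchgU32ZeroExtend(uint64_t *pu64Dst, uint64_t *pu64Rax, bool fZF)
{
    if (fZF)
        *pu64Dst &= UINT32_MAX;     /* success: the destination was written */
    else
        *pu64Rax &= UINT32_MAX;     /* failure: EAX received the old value */
}
#endif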
10707
10708
10709/** Opcode 0x0f 0xb2. */
10710FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10711{
10712 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10713 IEMOP_HLP_MIN_386();
10714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10715 if (IEM_IS_MODRM_REG_MODE(bRm))
10716 IEMOP_RAISE_INVALID_OPCODE_RET();
10717 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10718}
10719
10720
10721/**
10722 * @opcode 0xb3
10723 * @oppfx n/a
10724 * @opflclass bitmap
10725 */
10726FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10727{
10728 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10729 IEMOP_HLP_MIN_386();
10730 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10731 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10732}
10733
10734
10735/** Opcode 0x0f 0xb4. */
10736FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10737{
10738 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10739 IEMOP_HLP_MIN_386();
10740 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10741 if (IEM_IS_MODRM_REG_MODE(bRm))
10742 IEMOP_RAISE_INVALID_OPCODE_RET();
10743 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10744}
10745
10746
10747/** Opcode 0x0f 0xb5. */
10748FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10749{
10750 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10751 IEMOP_HLP_MIN_386();
10752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10753 if (IEM_IS_MODRM_REG_MODE(bRm))
10754 IEMOP_RAISE_INVALID_OPCODE_RET();
10755 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10756}
10757
10758
10759/** Opcode 0x0f 0xb6. */
10760FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10761{
10762 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10763 IEMOP_HLP_MIN_386();
10764
10765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10766
10767 /*
10768 * If rm is denoting a register, no more instruction bytes.
10769 */
10770 if (IEM_IS_MODRM_REG_MODE(bRm))
10771 {
10772 switch (pVCpu->iem.s.enmEffOpSize)
10773 {
10774 case IEMMODE_16BIT:
10775 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10777 IEM_MC_LOCAL(uint16_t, u16Value);
10778 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10779 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10780 IEM_MC_ADVANCE_RIP_AND_FINISH();
10781 IEM_MC_END();
10782 break;
10783
10784 case IEMMODE_32BIT:
10785 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10787 IEM_MC_LOCAL(uint32_t, u32Value);
10788 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10789 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10790 IEM_MC_ADVANCE_RIP_AND_FINISH();
10791 IEM_MC_END();
10792 break;
10793
10794 case IEMMODE_64BIT:
10795 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10797 IEM_MC_LOCAL(uint64_t, u64Value);
10798 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10799 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10800 IEM_MC_ADVANCE_RIP_AND_FINISH();
10801 IEM_MC_END();
10802 break;
10803
10804 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10805 }
10806 }
10807 else
10808 {
10809 /*
10810 * We're loading a register from memory.
10811 */
10812 switch (pVCpu->iem.s.enmEffOpSize)
10813 {
10814 case IEMMODE_16BIT:
10815 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10816 IEM_MC_LOCAL(uint16_t, u16Value);
10817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10820 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10821 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10822 IEM_MC_ADVANCE_RIP_AND_FINISH();
10823 IEM_MC_END();
10824 break;
10825
10826 case IEMMODE_32BIT:
10827 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10828 IEM_MC_LOCAL(uint32_t, u32Value);
10829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10832 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10833 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10834 IEM_MC_ADVANCE_RIP_AND_FINISH();
10835 IEM_MC_END();
10836 break;
10837
10838 case IEMMODE_64BIT:
10839 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
10840 IEM_MC_LOCAL(uint64_t, u64Value);
10841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10842 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10844 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10845 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10846 IEM_MC_ADVANCE_RIP_AND_FINISH();
10847 IEM_MC_END();
10848 break;
10849
10850 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10851 }
10852 }
10853}
10854
10855
10856/** Opcode 0x0f 0xb7. */
10857FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10858{
10859 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10860 IEMOP_HLP_MIN_386();
10861
10862 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10863
10864 /** @todo Not entirely sure how the operand size prefix is handled here,
10865 * assuming that it will be ignored. Would be nice to have a few
10866 * tests for this. */
10867
10868 /** @todo There should be no difference in the behaviour whether REX.W is
10869 * present or not... */
10870
10871 /*
10872 * If rm is denoting a register, no more instruction bytes.
10873 */
10874 if (IEM_IS_MODRM_REG_MODE(bRm))
10875 {
10876 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10877 {
10878 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10880 IEM_MC_LOCAL(uint32_t, u32Value);
10881 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10882 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10883 IEM_MC_ADVANCE_RIP_AND_FINISH();
10884 IEM_MC_END();
10885 }
10886 else
10887 {
10888 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10890 IEM_MC_LOCAL(uint64_t, u64Value);
10891 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10892 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10893 IEM_MC_ADVANCE_RIP_AND_FINISH();
10894 IEM_MC_END();
10895 }
10896 }
10897 else
10898 {
10899 /*
10900 * We're loading a register from memory.
10901 */
10902 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10903 {
10904 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10905 IEM_MC_LOCAL(uint32_t, u32Value);
10906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10909 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10910 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10911 IEM_MC_ADVANCE_RIP_AND_FINISH();
10912 IEM_MC_END();
10913 }
10914 else
10915 {
10916 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
10917 IEM_MC_LOCAL(uint64_t, u64Value);
10918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10919 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10921 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10922 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10923 IEM_MC_ADVANCE_RIP_AND_FINISH();
10924 IEM_MC_END();
10925 }
10926 }
10927}
10928
10929
10930/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10931FNIEMOP_UD_STUB(iemOp_jmpe);
10932
10933
10934/**
10935 * @opcode 0xb8
10936 * @oppfx 0xf3
10937 * @opflmodify cf,pf,af,zf,sf,of
10938 * @opflclear cf,pf,af,sf,of
10939 */
10940FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10941{
10942 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10943 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10944 return iemOp_InvalidNeedRM(pVCpu);
10945#ifndef TST_IEM_CHECK_MC
10946# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10947 static const IEMOPBINSIZES s_Native =
10948 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10949# endif
10950 static const IEMOPBINSIZES s_Fallback =
10951 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10952#endif
10953 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10954 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10955 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, popcnt, 0);
10956}
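
/*
 * Illustrative sketch of what a C fallback for POPCNT could look like when
 * the host lacks the instruction; an assumption for clarity, not the actual
 * iemAImpl_popcnt_u16_fallback source.
 */
#if 0
static uint16_t iemSketchPopCntU16(uint16_t uSrc)
{
    uint16_t cBits = 0;
    while (uSrc)
    {
        uSrc &= (uint16_t)(uSrc - 1);   /* clear the lowest set bit */
        cBits++;
    }
    return cBits;
}
#endif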
10957
10958
10959/**
10960 * @opcode 0xb9
10961 * @opinvalid intel-modrm
10962 * @optest ->
10963 */
10964FNIEMOP_DEF(iemOp_Grp10)
10965{
10966 /*
10967 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
10968 * the modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10969 */
10970 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10971 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10972 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10973}
10974
10975
10976/**
10977 * Body for group 8 bit instruction.
10978 */
10979#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10980 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10981 \
10982 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10983 { \
10984 /* register destination. */ \
10985 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10986 \
10987 switch (pVCpu->iem.s.enmEffOpSize) \
10988 { \
10989 case IEMMODE_16BIT: \
10990 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10992 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10993 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10994 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10995 \
10996 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10997 IEM_MC_REF_EFLAGS(pEFlags); \
10998 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10999 \
11000 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11001 IEM_MC_END(); \
11002 break; \
11003 \
11004 case IEMMODE_32BIT: \
11005 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
11006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11007 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11008 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11009 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11010 \
11011 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11012 IEM_MC_REF_EFLAGS(pEFlags); \
11013 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11014 \
11015 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
11016 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11017 IEM_MC_END(); \
11018 break; \
11019 \
11020 case IEMMODE_64BIT: \
11021 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
11022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11023 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11024 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11025 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11026 \
11027 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11028 IEM_MC_REF_EFLAGS(pEFlags); \
11029 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11030 \
11031 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11032 IEM_MC_END(); \
11033 break; \
11034 \
11035 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11036 } \
11037 } \
11038 else \
11039 { \
11040 /* memory destination. */ \
11041 /** @todo test negative bit offsets! */ \
11042 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
11043 { \
11044 switch (pVCpu->iem.s.enmEffOpSize) \
11045 { \
11046 case IEMMODE_16BIT: \
11047 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11050 \
11051 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11052 IEMOP_HLP_DONE_DECODING(); \
11053 \
11054 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11055 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11056 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11057 \
11058 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
11059 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11060 IEM_MC_FETCH_EFLAGS(EFlags); \
11061 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
11062 \
11063 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
11064 IEM_MC_COMMIT_EFLAGS(EFlags); \
11065 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11066 IEM_MC_END(); \
11067 break; \
11068 \
11069 case IEMMODE_32BIT: \
11070 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11071 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11072 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11073 \
11074 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11075 IEMOP_HLP_DONE_DECODING(); \
11076 \
11077 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11078 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11079 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11080 \
11081 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11082 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11083 IEM_MC_FETCH_EFLAGS(EFlags); \
11084 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11085 \
11086 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
11087 IEM_MC_COMMIT_EFLAGS(EFlags); \
11088 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11089 IEM_MC_END(); \
11090 break; \
11091 \
11092 case IEMMODE_64BIT: \
11093 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
11094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11095 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11096 \
11097 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11098 IEMOP_HLP_DONE_DECODING(); \
11099 \
11100 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11101 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11102 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11103 \
11104 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11105 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11106 IEM_MC_FETCH_EFLAGS(EFlags); \
11107 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11108 \
11109 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
11110 IEM_MC_COMMIT_EFLAGS(EFlags); \
11111 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11112 IEM_MC_END(); \
11113 break; \
11114 \
11115 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11116 } \
11117 } \
11118 else \
11119 { \
11120 (void)0
11121/* Separate macro to work around a parsing issue in IEMAllInstPython.py */
11122#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
11123 switch (pVCpu->iem.s.enmEffOpSize) \
11124 { \
11125 case IEMMODE_16BIT: \
11126 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11127 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11129 \
11130 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11131 IEMOP_HLP_DONE_DECODING(); \
11132 \
11133 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11134 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11135 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11136 \
11137 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
11138 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11139 IEM_MC_FETCH_EFLAGS(EFlags); \
11140 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
11141 \
11142 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
11143 IEM_MC_COMMIT_EFLAGS(EFlags); \
11144 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11145 IEM_MC_END(); \
11146 break; \
11147 \
11148 case IEMMODE_32BIT: \
11149 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11152 \
11153 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11154 IEMOP_HLP_DONE_DECODING(); \
11155 \
11156 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11157 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11158 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11159 \
11160 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11161 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11162 IEM_MC_FETCH_EFLAGS(EFlags); \
11163 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
11164 \
11165 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
11166 IEM_MC_COMMIT_EFLAGS(EFlags); \
11167 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11168 IEM_MC_END(); \
11169 break; \
11170 \
11171 case IEMMODE_64BIT: \
11172 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
11173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11175 \
11176 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11177 IEMOP_HLP_DONE_DECODING(); \
11178 \
11179 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11180 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11181 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11182 \
11183 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11184 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11185 IEM_MC_FETCH_EFLAGS(EFlags); \
11186 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
11187 \
11188 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
11189 IEM_MC_COMMIT_EFLAGS(EFlags); \
11190 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11191 IEM_MC_END(); \
11192 break; \
11193 \
11194 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11195 } \
11196 } \
11197 } \
11198 (void)0
11199
11200/* Read-only version (bt) */
11201#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
11202 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
11203 \
11204 if (IEM_IS_MODRM_REG_MODE(bRm)) \
11205 { \
11206 /* register destination. */ \
11207 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11208 \
11209 switch (pVCpu->iem.s.enmEffOpSize) \
11210 { \
11211 case IEMMODE_16BIT: \
11212 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
11213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11214 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
11215 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
11216 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11217 \
11218 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11219 IEM_MC_REF_EFLAGS(pEFlags); \
11220 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
11221 \
11222 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11223 IEM_MC_END(); \
11224 break; \
11225 \
11226 case IEMMODE_32BIT: \
11227 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
11228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11229 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
11230 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11231 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11232 \
11233 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11234 IEM_MC_REF_EFLAGS(pEFlags); \
11235 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11236 \
11237 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11238 IEM_MC_END(); \
11239 break; \
11240 \
11241 case IEMMODE_64BIT: \
11242 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
11243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11244 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
11245 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11246 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11247 \
11248 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11249 IEM_MC_REF_EFLAGS(pEFlags); \
11250 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11251 \
11252 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11253 IEM_MC_END(); \
11254 break; \
11255 \
11256 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11257 } \
11258 } \
11259 else \
11260 { \
11261 /* memory destination. */ \
11262 /** @todo test negative bit offsets! */ \
11263 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
11264 { \
11265 switch (pVCpu->iem.s.enmEffOpSize) \
11266 { \
11267 case IEMMODE_16BIT: \
11268 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11269 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11271 \
11272 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11273 IEMOP_HLP_DONE_DECODING(); \
11274 \
11275 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11276 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
11277 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11278 \
11279 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
11280 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11281 IEM_MC_FETCH_EFLAGS(EFlags); \
11282 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
11283 \
11284 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11285 IEM_MC_COMMIT_EFLAGS(EFlags); \
11286 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11287 IEM_MC_END(); \
11288 break; \
11289 \
11290 case IEMMODE_32BIT: \
11291 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11292 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11294 \
11295 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11296 IEMOP_HLP_DONE_DECODING(); \
11297 \
11298 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11299 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
11300 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11301 \
11302 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11303 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11304 IEM_MC_FETCH_EFLAGS(EFlags); \
11305 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11306 \
11307 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11308 IEM_MC_COMMIT_EFLAGS(EFlags); \
11309 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11310 IEM_MC_END(); \
11311 break; \
11312 \
11313 case IEMMODE_64BIT: \
11314 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
11315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11317 \
11318 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11319 IEMOP_HLP_DONE_DECODING(); \
11320 \
11321 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11322 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
11323 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11324 \
11325 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11326 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11327 IEM_MC_FETCH_EFLAGS(EFlags); \
11328 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11329 \
11330 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11331 IEM_MC_COMMIT_EFLAGS(EFlags); \
11332 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11333 IEM_MC_END(); \
11334 break; \
11335 \
11336 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11337 } \
11338 } \
11339 else \
11340 { \
11341 IEMOP_HLP_DONE_DECODING(); \
11342 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11343 } \
11344 } \
11345 (void)0
11346
11347
11348/**
11349 * @opmaps grp8
11350 * @opcode /4
11351 * @oppfx n/a
11352 * @opflclass bitmap
11353 */
11354FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11355{
11356 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11357 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11358}
11359
11360
11361/**
11362 * @opmaps grp8
11363 * @opcode /5
11364 * @oppfx n/a
11365 * @opflclass bitmap
11366 */
11367FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11368{
11369 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11370 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11371 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11372}
11373
11374
11375/**
11376 * @opmaps grp8
11377 * @opcode /6
11378 * @oppfx n/a
11379 * @opflclass bitmap
11380 */
11381FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11382{
11383 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11384 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11385 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11386}
11387
11388
11389/**
11390 * @opmaps grp8
11391 * @opcode /7
11392 * @oppfx n/a
11393 * @opflclass bitmap
11394 */
11395FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11396{
11397 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11398 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11399 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11400}
11401
11402
11403/** Opcode 0x0f 0xba. */
11404FNIEMOP_DEF(iemOp_Grp8)
11405{
11406 IEMOP_HLP_MIN_386();
11407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11408 switch (IEM_GET_MODRM_REG_8(bRm))
11409 {
11410 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11411 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11412 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11413 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11414
11415 case 0: case 1: case 2: case 3:
11416 /* Both AMD and Intel want full modr/m decoding and imm8. */
11417 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11418
11419 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11420 }
11421}
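
/*
 * Illustrative sketch, not the worker source: for the imm8 forms the bit
 * offset is taken modulo the operand width, matching the bImm & 0x0f/0x1f/0x3f
 * masking in the bodies above. The helper name is hypothetical.
 */
#if 0
static bool iemSketchBtU32(uint32_t const *pu32Dst, uint8_t bImm)
{
    return (*pu32Dst >> (bImm & 0x1f)) & 1; /* the tested bit ends up in CF */
}
#endif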
11422
11423
11424/**
11425 * @opcode 0xbb
11426 * @oppfx n/a
11427 * @opflclass bitmap
11428 */
11429FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11430{
11431 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11432 IEMOP_HLP_MIN_386();
11433 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11434 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11435}
11436
11437
11438/**
11439 * Body for BSF and BSR instructions.
11440 *
11441 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11442 * the destination register, which means that for 32-bit operations the high
11443 * bits must be left alone.
11444 *
11445 * @param pImpl Pointer to the instruction implementation (assembly).
11446 */
11447#define IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl) \
11448 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
11449 \
11450 /* \
11451 * If rm is denoting a register, no more instruction bytes. \
11452 */ \
11453 if (IEM_IS_MODRM_REG_MODE(bRm)) \
11454 { \
11455 switch (pVCpu->iem.s.enmEffOpSize) \
11456 { \
11457 case IEMMODE_16BIT: \
11458 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
11459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11460 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11461 IEM_MC_ARG(uint16_t, u16Src, 1); \
11462 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11463 \
11464 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11465 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11466 IEM_MC_REF_EFLAGS(pEFlags); \
11467 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); \
11468 \
11469 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11470 IEM_MC_END(); \
11471 break; \
11472 \
11473 case IEMMODE_32BIT: \
11474 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
11475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11476 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11477 IEM_MC_ARG(uint32_t, u32Src, 1); \
11478 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11479 \
11480 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11481 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11482 IEM_MC_REF_EFLAGS(pEFlags); \
11483 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); \
11484 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11485 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11486 } IEM_MC_ENDIF(); \
11487 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11488 IEM_MC_END(); \
11489 break; \
11490 \
11491 case IEMMODE_64BIT: \
11492 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
11493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11494 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11495 IEM_MC_ARG(uint64_t, u64Src, 1); \
11496 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11497 \
11498 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11499 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11500 IEM_MC_REF_EFLAGS(pEFlags); \
11501 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); \
11502 \
11503 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11504 IEM_MC_END(); \
11505 break; \
11506 \
11507 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11508 } \
11509 } \
11510 else \
11511 { \
11512 /* \
11513 * We're accessing memory. \
11514 */ \
11515 switch (pVCpu->iem.s.enmEffOpSize) \
11516 { \
11517 case IEMMODE_16BIT: \
11518 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
11519 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11520 IEM_MC_ARG(uint16_t, u16Src, 1); \
11521 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11523 \
11524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11526 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11527 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11528 IEM_MC_REF_EFLAGS(pEFlags); \
11529 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); \
11530 \
11531 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11532 IEM_MC_END(); \
11533 break; \
11534 \
11535 case IEMMODE_32BIT: \
11536 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
11537 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11538 IEM_MC_ARG(uint32_t, u32Src, 1); \
11539 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11541 \
11542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11544 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11545 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11546 IEM_MC_REF_EFLAGS(pEFlags); \
11547 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); \
11548 \
11549 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
11550 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
11551 } IEM_MC_ENDIF(); \
11552 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11553 IEM_MC_END(); \
11554 break; \
11555 \
11556 case IEMMODE_64BIT: \
11557 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
11558 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11559 IEM_MC_ARG(uint64_t, u64Src, 1); \
11560 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11562 \
11563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11565 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11566 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11567 IEM_MC_REF_EFLAGS(pEFlags); \
11568 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); \
11569 \
11570 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11571 IEM_MC_END(); \
11572 break; \
11573 \
11574 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11575 } \
11576 } (void)0
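
/*
 * Illustrative sketch of the worker contract assumed above: when the source
 * is zero the worker sets ZF and leaves the destination untouched, so the
 * 32-bit paths only clear the high register half when ZF ends up clear.
 * An assumption for clarity, not the actual bsf worker source.
 */
#if 0
static void iemSketchBsfU32(uint32_t *pu32Dst, uint32_t u32Src, uint32_t *pfEFlags)
{
    if (u32Src)
    {
        *pfEFlags &= ~(uint32_t)X86_EFL_ZF;
        uint32_t iBit = 0;
        while (!(u32Src & 1)) { u32Src >>= 1; iBit++; } /* lowest set bit */
        *pu32Dst = iBit;
    }
    else
        *pfEFlags |= X86_EFL_ZF;    /* destination deliberately not written */
}
#endif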
11577
11578
11579/**
11580 * @opcode 0xbc
11581 * @oppfx !0xf3
11582 * @opfltest cf,pf,af,sf,of
11583 * @opflmodify cf,pf,af,zf,sf,of
11584 * @opflundef cf,pf,af,sf,of
11585 * @todo AMD doesn't modify cf,pf,af,sf&of, but since Intel does, we're forced to
11586 * document them as inputs. Sigh.
11587 */
11588FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11589{
11590 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11591 IEMOP_HLP_MIN_386();
11592 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11593 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags);
11594 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11595}
11596
11597
11598/**
11599 * @opcode 0xbc
11600 * @oppfx 0xf3
11601 * @opfltest pf,af,sf,of
11602 * @opflmodify cf,pf,af,zf,sf,of
11603 * @opflundef pf,af,sf,of
11604 * @todo AMD doesn't modify pf,af,sf&of, but since Intel does, we're forced to
11605 * document them as inputs. Sigh.
11606 */
11607FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11608{
11609 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11610 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11611 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11612
11613#ifndef TST_IEM_CHECK_MC
11614 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11615 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11616 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11617 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11618 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11619 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11620 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11621 {
11622 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11623 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11624 };
11625#endif
11626 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11627 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11628 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11630 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, tzcnt, 0);
11631}
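
/*
 * Illustrative sketch of the TZCNT semantics, assuming the Intel-documented
 * flag behaviour: unlike BSF it is defined for a zero source, returning the
 * operand width with CF set. Not the actual iemAImpl_tzcnt_u32 source.
 */
#if 0
static uint32_t iemSketchTzCntU32(uint32_t uSrc, uint32_t *pfEFlags)
{
    *pfEFlags &= ~(uint32_t)(X86_EFL_CF | X86_EFL_ZF);
    if (!uSrc)
    {
        *pfEFlags |= X86_EFL_CF;    /* CF signals a zero source */
        return 32;                  /* the result is the operand width */
    }
    uint32_t iBit = 0;
    while (!(uSrc & 1)) { uSrc >>= 1; iBit++; }
    if (!iBit)
        *pfEFlags |= X86_EFL_ZF;    /* ZF reflects a zero result */
    return iBit;
}
#endif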
11632
11633
11634/**
11635 * @opcode 0xbd
11636 * @oppfx !0xf3
11637 * @opfltest cf,pf,af,sf,of
11638 * @opflmodify cf,pf,af,zf,sf,of
11639 * @opflundef cf,pf,af,sf,of
11640 * @todo AMD doesn't modify cf,pf,af,sf&of, but since Intel does, we're forced to
11641 * document them as inputs. Sigh.
11642 */
11643FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11644{
11645 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11646 IEMOP_HLP_MIN_386();
11647 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11648 PCIEMOPBINSIZES const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags);
11649 IEMOP_BODY_BIT_SCAN_OPERATOR_RV_RM(pImpl);
11650}
11651
11652
11653/**
11654 * @opcode 0xbd
11655 * @oppfx 0xf3
11656 * @opfltest pf,af,sf,of
11657 * @opflmodify cf,pf,af,zf,sf,of
11658 * @opflundef pf,af,sf,of
11659 * @todo AMD doesn't modify pf,af,sf&of, but since Intel does, we're forced to
11660 * document them as inputs. Sigh.
11661 */
11662FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11663{
11664 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
11665 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11666 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11667
11668#ifndef TST_IEM_CHECK_MC
11669 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11670 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11671 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11672 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11673 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11674 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11675 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11676 {
11677 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11678 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11679 };
11680#endif
11681 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11682 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11683 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11684 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11685 IEMOP_BODY_BINARY_rv_rm(bRm, pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, IEM_MC_F_NOT_286_OR_OLDER, lzcnt, 0);
11686}
11687
11688
11689
11690/** Opcode 0x0f 0xbe. */
11691FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11692{
11693 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11694 IEMOP_HLP_MIN_386();
11695
11696 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11697
11698 /*
11699 * If rm is denoting a register, no more instruction bytes.
11700 */
11701 if (IEM_IS_MODRM_REG_MODE(bRm))
11702 {
11703 switch (pVCpu->iem.s.enmEffOpSize)
11704 {
11705 case IEMMODE_16BIT:
11706 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11708 IEM_MC_LOCAL(uint16_t, u16Value);
11709 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11710 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11711 IEM_MC_ADVANCE_RIP_AND_FINISH();
11712 IEM_MC_END();
11713 break;
11714
11715 case IEMMODE_32BIT:
11716 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11718 IEM_MC_LOCAL(uint32_t, u32Value);
11719 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11720 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11721 IEM_MC_ADVANCE_RIP_AND_FINISH();
11722 IEM_MC_END();
11723 break;
11724
11725 case IEMMODE_64BIT:
11726 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
11727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11728 IEM_MC_LOCAL(uint64_t, u64Value);
11729 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11730 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11731 IEM_MC_ADVANCE_RIP_AND_FINISH();
11732 IEM_MC_END();
11733 break;
11734
11735 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11736 }
11737 }
11738 else
11739 {
11740 /*
11741 * We're loading a register from memory.
11742 */
11743 switch (pVCpu->iem.s.enmEffOpSize)
11744 {
11745 case IEMMODE_16BIT:
11746 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11747 IEM_MC_LOCAL(uint16_t, u16Value);
11748 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11751 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11752 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11753 IEM_MC_ADVANCE_RIP_AND_FINISH();
11754 IEM_MC_END();
11755 break;
11756
11757 case IEMMODE_32BIT:
11758 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11759 IEM_MC_LOCAL(uint32_t, u32Value);
11760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11763 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11764 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11765 IEM_MC_ADVANCE_RIP_AND_FINISH();
11766 IEM_MC_END();
11767 break;
11768
11769 case IEMMODE_64BIT:
11770 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
11771 IEM_MC_LOCAL(uint64_t, u64Value);
11772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11775 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11776 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11777 IEM_MC_ADVANCE_RIP_AND_FINISH();
11778 IEM_MC_END();
11779 break;
11780
11781 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11782 }
11783 }
11784}
11785
11786
11787/** Opcode 0x0f 0xbf. */
11788FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11789{
11790 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11791 IEMOP_HLP_MIN_386();
11792
11793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11794
11795 /** @todo Not entirely sure how the operand size prefix is handled here,
11796 * assuming that it will be ignored. Would be nice to have a few
11797 * tests for this. */
11798 /*
11799 * If rm is denoting a register, no more instruction bytes.
11800 */
11801 if (IEM_IS_MODRM_REG_MODE(bRm))
11802 {
11803 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11804 {
11805 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11807 IEM_MC_LOCAL(uint32_t, u32Value);
11808 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11809 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11810 IEM_MC_ADVANCE_RIP_AND_FINISH();
11811 IEM_MC_END();
11812 }
11813 else
11814 {
11815 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
11816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11817 IEM_MC_LOCAL(uint64_t, u64Value);
11818 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11819 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11820 IEM_MC_ADVANCE_RIP_AND_FINISH();
11821 IEM_MC_END();
11822 }
11823 }
11824 else
11825 {
11826 /*
11827 * We're loading a register from memory.
11828 */
11829 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11830 {
11831 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11832 IEM_MC_LOCAL(uint32_t, u32Value);
11833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11836 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11837 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11838 IEM_MC_ADVANCE_RIP_AND_FINISH();
11839 IEM_MC_END();
11840 }
11841 else
11842 {
11843 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
11844 IEM_MC_LOCAL(uint64_t, u64Value);
11845 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11846 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11848 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11849 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11850 IEM_MC_ADVANCE_RIP_AND_FINISH();
11851 IEM_MC_END();
11852 }
11853 }
11854}
11855
11856
11857/**
11858 * @opcode 0xc0
11859 * @opflclass arithmetic
11860 */
11861FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11862{
11863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11864 IEMOP_HLP_MIN_486();
11865 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11866
11867 /*
11868 * If rm is denoting a register, no more instruction bytes.
11869 */
11870 if (IEM_IS_MODRM_REG_MODE(bRm))
11871 {
11872 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11874 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11875 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11876 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11877
11878 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11879 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11880 IEM_MC_REF_EFLAGS(pEFlags);
11881 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11882
11883 IEM_MC_ADVANCE_RIP_AND_FINISH();
11884 IEM_MC_END();
11885 }
11886 else
11887 {
11888 /*
11889 * We're accessing memory.
11890 */
11891#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
11892 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0); \
11893 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11895 IEMOP_HLP_DONE_DECODING(); \
11896 \
11897 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11898 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11899 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11900 \
11901 IEM_MC_LOCAL(uint8_t, u8RegCopy); \
11902 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11903 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
11904 \
11905 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11906 IEM_MC_FETCH_EFLAGS(EFlags); \
11907 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
11908 \
11909 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11910 IEM_MC_COMMIT_EFLAGS(EFlags); \
11911 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
11912 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11913 IEM_MC_END()
11914 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11915 {
11916 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8,RW);
11917 }
11918 else
11919 {
11920 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked,ATOMIC);
11921 }
11922 }
11923}
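
/*
 * Illustrative sketch of the XADD semantics the worker implements; an
 * assumption for clarity, not the actual iemAImpl_xadd_u8 source. The
 * exchange explains why the memory path above writes the u8RegCopy local
 * back to the source register after the call.
 */
#if 0
static void iemSketchXAddU8(uint8_t *pu8Dst, uint8_t *pu8Reg, uint32_t *pfEFlags)
{
    uint8_t const u8Sum = (uint8_t)(*pu8Dst + *pu8Reg); /* flags as for ADD, omitted here */
    *pu8Reg = *pu8Dst;                                  /* source gets the old destination */
    *pu8Dst = u8Sum;
    (void)pfEFlags;
}
#endif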
11924
11925
11926/**
11927 * @opcode 0xc1
11928 * @opflclass arithmetic
11929 */
11930FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11931{
11932 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11933 IEMOP_HLP_MIN_486();
11934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11935
11936 /*
11937 * If rm is denoting a register, no more instruction bytes.
11938 */
11939 if (IEM_IS_MODRM_REG_MODE(bRm))
11940 {
11941 switch (pVCpu->iem.s.enmEffOpSize)
11942 {
11943 case IEMMODE_16BIT:
11944 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11946 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11947 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11948 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11949
11950 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11951 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11952 IEM_MC_REF_EFLAGS(pEFlags);
11953 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11954
11955 IEM_MC_ADVANCE_RIP_AND_FINISH();
11956 IEM_MC_END();
11957 break;
11958
11959 case IEMMODE_32BIT:
11960 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11962 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11963 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11964 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11965
11966 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11967 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11968 IEM_MC_REF_EFLAGS(pEFlags);
11969 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11970
11971 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11972 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11973 IEM_MC_ADVANCE_RIP_AND_FINISH();
11974 IEM_MC_END();
11975 break;
11976
11977 case IEMMODE_64BIT:
11978 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
11979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11980 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11981 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11982 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11983
11984 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11985 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11986 IEM_MC_REF_EFLAGS(pEFlags);
11987 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11988
11989 IEM_MC_ADVANCE_RIP_AND_FINISH();
11990 IEM_MC_END();
11991 break;
11992
11993 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11994 }
11995 }
11996 else
11997 {
11998 /*
11999 * We're accessing memory.
12000 */
12001#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
12002 do { \
12003 switch (pVCpu->iem.s.enmEffOpSize) \
12004 { \
12005 case IEMMODE_16BIT: \
12006 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0); \
12007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12009 IEMOP_HLP_DONE_DECODING(); \
12010 \
12011 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12012 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
12013 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12014 \
12015 IEM_MC_LOCAL(uint16_t, u16RegCopy); \
12016 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
12017 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
12018 \
12019 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
12020 IEM_MC_FETCH_EFLAGS(EFlags); \
12021 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
12022 \
12023 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12024 IEM_MC_COMMIT_EFLAGS(EFlags); \
12025 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
12026 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12027 IEM_MC_END(); \
12028 break; \
12029 \
12030 case IEMMODE_32BIT: \
12031 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0); \
12032 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12034 IEMOP_HLP_DONE_DECODING(); \
12035 \
12036 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12037 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
12038 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12039 \
12040 IEM_MC_LOCAL(uint32_t, u32RegCopy); \
12041 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
12042 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
12043 \
12044 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
12045 IEM_MC_FETCH_EFLAGS(EFlags); \
12046 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
12047 \
12048 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12049 IEM_MC_COMMIT_EFLAGS(EFlags); \
12050 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
12051 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12052 IEM_MC_END(); \
12053 break; \
12054 \
12055 case IEMMODE_64BIT: \
12056 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
12057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12058 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12059 IEMOP_HLP_DONE_DECODING(); \
12060 \
12061 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12062 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
12063 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12064 \
12065 IEM_MC_LOCAL(uint64_t, u64RegCopy); \
12066 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
12067 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
12068 \
12069 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
12070 IEM_MC_FETCH_EFLAGS(EFlags); \
12071 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
12072 \
12073 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12074 IEM_MC_COMMIT_EFLAGS(EFlags); \
12075 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
12076 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12077 IEM_MC_END(); \
12078 break; \
12079 \
12080 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12081 } \
12082 } while (0)
12083
12084 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12085 {
12086 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64,RW);
12087 }
12088 else
12089 {
12090 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked,ATOMIC);
12091 }
12092 }
12093}
12094
12095
12096/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
12097FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
12098{
12099 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12100
12101 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12102 if (IEM_IS_MODRM_REG_MODE(bRm))
12103 {
12104 /*
12105 * XMM, XMM.
12106 */
12107 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12108 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12110 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12111 IEM_MC_LOCAL(X86XMMREG, Dst);
12112 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12113 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12114 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12115 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12116 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12117 IEM_MC_PREPARE_SSE_USAGE();
12118 IEM_MC_REF_MXCSR(pfMxcsr);
12119 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
12120 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
12121 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12122 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12123 } IEM_MC_ELSE() {
12124 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
12125 } IEM_MC_ENDIF();
12126
12127 IEM_MC_ADVANCE_RIP_AND_FINISH();
12128 IEM_MC_END();
12129 }
12130 else
12131 {
12132 /*
12133 * XMM, [mem128].
12134 */
12135 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
12136 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12137 IEM_MC_LOCAL(X86XMMREG, Dst);
12138 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12139 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12140 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12141 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12142
12143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12144 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12145 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12147 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12148 IEM_MC_PREPARE_SSE_USAGE();
12149
12150 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12151 IEM_MC_REF_MXCSR(pfMxcsr);
12152 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
12153 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12154 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12155 } IEM_MC_ELSE() {
12156 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
12157 } IEM_MC_ENDIF();
12158
12159 IEM_MC_ADVANCE_RIP_AND_FINISH();
12160 IEM_MC_END();
12161 }
12162}
12163
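/* Reference sketch (illustrative only, names hypothetical): the low three imm8
 * bits select the SSE compare predicate used per lane by cmpps/cmppd/cmpss/
 * cmpsd, each lane yielding an all-ones or all-zeroes mask.  For one
 * single-precision lane, with fUo meaning either input is NaN:
 *
 *      bool fRes;
 *      switch (bImm & 7)
 *      {
 *          case 0: fRes = !fUo && fDst == fSrc; break;  // EQ
 *          case 1: fRes = !fUo && fDst <  fSrc; break;  // LT
 *          case 2: fRes = !fUo && fDst <= fSrc; break;  // LE
 *          case 3: fRes =  fUo;                 break;  // UNORD
 *          case 4: fRes =  fUo || fDst != fSrc; break;  // NEQ
 *          case 5: fRes =  fUo || fDst >= fSrc; break;  // NLT
 *          case 6: fRes =  fUo || fDst >  fSrc; break;  // NLE
 *          case 7: fRes = !fUo;                 break;  // ORD
 *      }
 *      uResLane = fRes ? UINT32_MAX : 0;
 */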
12164
12165/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
12166FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
12167{
12168 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12169
12170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12171 if (IEM_IS_MODRM_REG_MODE(bRm))
12172 {
12173 /*
12174 * XMM, XMM.
12175 */
12176 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12177 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12179 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12180 IEM_MC_LOCAL(X86XMMREG, Dst);
12181 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12182 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12183 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12184 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12185 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12186 IEM_MC_PREPARE_SSE_USAGE();
12187 IEM_MC_REF_MXCSR(pfMxcsr);
12188 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
12189 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12190 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12191 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12192 } IEM_MC_ELSE() {
12193 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
12194 } IEM_MC_ENDIF();
12195
12196 IEM_MC_ADVANCE_RIP_AND_FINISH();
12197 IEM_MC_END();
12198 }
12199 else
12200 {
12201 /*
12202 * XMM, [mem128].
12203 */
12204 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
12205 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12206 IEM_MC_LOCAL(X86XMMREG, Dst);
12207 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12208 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12209 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12210 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12211
12212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12213 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12214 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12216 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12217 IEM_MC_PREPARE_SSE_USAGE();
12218
12219 IEM_MC_REF_MXCSR(pfMxcsr);
12220 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12221 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12222 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12223 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12224 } IEM_MC_ELSE() {
12225 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
12226 } IEM_MC_ENDIF();
12227
12228 IEM_MC_ADVANCE_RIP_AND_FINISH();
12229 IEM_MC_END();
12230 }
12231}
12232
12233
12234/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
12235FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
12236{
12237 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12238
12239 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12240 if (IEM_IS_MODRM_REG_MODE(bRm))
12241 {
12242 /*
12243 * XMM32, XMM32.
12244 */
12245 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12246 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12248 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12249 IEM_MC_LOCAL(X86XMMREG, Dst);
12250 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12251 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12252 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12253 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12254 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12255 IEM_MC_PREPARE_SSE_USAGE();
12256 IEM_MC_REF_MXCSR(pfMxcsr);
12257 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
12258 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
12259 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12260 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12261 } IEM_MC_ELSE() {
12262 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
12263 } IEM_MC_ENDIF();
12264
12265 IEM_MC_ADVANCE_RIP_AND_FINISH();
12266 IEM_MC_END();
12267 }
12268 else
12269 {
12270 /*
12271 * XMM32, [mem32].
12272 */
12273 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
12274 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12275 IEM_MC_LOCAL(X86XMMREG, Dst);
12276 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12277 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12278 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12280
12281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12282 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12283 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12285 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12286 IEM_MC_PREPARE_SSE_USAGE();
12287
12288 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
12289 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12290 IEM_MC_REF_MXCSR(pfMxcsr);
12291 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
12292 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12293 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12294 } IEM_MC_ELSE() {
12295 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
12296 } IEM_MC_ENDIF();
12297
12298 IEM_MC_ADVANCE_RIP_AND_FINISH();
12299 IEM_MC_END();
12300 }
12301}
12302
12303
12304/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
12305FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
12306{
12307 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12308
12309 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12310 if (IEM_IS_MODRM_REG_MODE(bRm))
12311 {
12312 /*
12313 * XMM64, XMM64.
12314 */
12315 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12316 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12318 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12319 IEM_MC_LOCAL(X86XMMREG, Dst);
12320 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12321 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12322 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12323 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12324 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12325 IEM_MC_PREPARE_SSE_USAGE();
12326 IEM_MC_REF_MXCSR(pfMxcsr);
12327 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
12328 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12329 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12330 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12331 } IEM_MC_ELSE() {
12332 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12333 } IEM_MC_ENDIF();
12334
12335 IEM_MC_ADVANCE_RIP_AND_FINISH();
12336 IEM_MC_END();
12337 }
12338 else
12339 {
12340 /*
12341 * XMM64, [mem64].
12342 */
12343 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
12344 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12345 IEM_MC_LOCAL(X86XMMREG, Dst);
12346 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12347 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12348 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12350
12351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12352 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12353 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12355 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12356 IEM_MC_PREPARE_SSE_USAGE();
12357
12358 IEM_MC_REF_MXCSR(pfMxcsr);
12359 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
12360 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12361 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12362 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12363 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12364 } IEM_MC_ELSE() {
12365 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12366 } IEM_MC_ENDIF();
12367
12368 IEM_MC_ADVANCE_RIP_AND_FINISH();
12369 IEM_MC_END();
12370 }
12371}
12372
12373
12374/** Opcode 0x0f 0xc3. */
12375FNIEMOP_DEF(iemOp_movnti_My_Gy)
12376{
12377 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
12378
12379 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12380
12381 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
12382 if (IEM_IS_MODRM_MEM_MODE(bRm))
12383 {
12384 switch (pVCpu->iem.s.enmEffOpSize)
12385 {
12386 case IEMMODE_32BIT:
12387 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
12388 IEM_MC_LOCAL(uint32_t, u32Value);
12389 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12390
12391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12393
12394 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12395 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
12396 IEM_MC_ADVANCE_RIP_AND_FINISH();
12397 IEM_MC_END();
12398 break;
12399
12400 case IEMMODE_64BIT:
12401 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
12402 IEM_MC_LOCAL(uint64_t, u64Value);
12403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12404
12405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12407
12408 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12409 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
12410 IEM_MC_ADVANCE_RIP_AND_FINISH();
12411 IEM_MC_END();
12412 break;
12413
12414 case IEMMODE_16BIT:
12415 /** @todo check this form. */
12416 IEMOP_RAISE_INVALID_OPCODE_RET();
12417
12418 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12419 }
12420 }
12421 else
12422 IEMOP_RAISE_INVALID_OPCODE_RET();
12423}
12424
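/* Note (illustrative): the non-temporal hint of MOVNTI only affects caching on
 * real hardware, so IEM can implement it as a plain store as above.  For
 * reference, the compiler intrinsic equivalent of the 32-bit form would be
 * something like (pvDst being a hypothetical destination pointer):
 *
 *      _mm_stream_si32((int *)pvDst, (int)u32Value);    // <immintrin.h>
 */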
12425
12426/* Opcode 0x66 0x0f 0xc3 - invalid */
12427/* Opcode 0xf3 0x0f 0xc3 - invalid */
12428/* Opcode 0xf2 0x0f 0xc3 - invalid */
12429
12430
12431/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12432FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12433{
12434 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12436 if (IEM_IS_MODRM_REG_MODE(bRm))
12437 {
12438 /*
12439 * Register, register.
12440 */
12441 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12442 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12444 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12445 IEM_MC_ARG(uint16_t, u16Src, 1);
12446 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12447 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12448 IEM_MC_PREPARE_FPU_USAGE();
12449 IEM_MC_FPU_TO_MMX_MODE();
12450 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12451 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12452 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12453 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12454 IEM_MC_ADVANCE_RIP_AND_FINISH();
12455 IEM_MC_END();
12456 }
12457 else
12458 {
12459 /*
12460 * Register, memory.
12461 */
12462 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12463 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12464 IEM_MC_ARG(uint16_t, u16Src, 1);
12465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12466
12467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12468 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12469 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12471 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12472 IEM_MC_PREPARE_FPU_USAGE();
12473
12474 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12475 IEM_MC_FPU_TO_MMX_MODE();
12476
12477 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12478 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12479 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12480 IEM_MC_ADVANCE_RIP_AND_FINISH();
12481 IEM_MC_END();
12482 }
12483}
12484
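/* Reference sketch (illustrative only): PINSRW replaces a single 16-bit word
 * of the destination, selected by the immediate (two bits for the 4-word MMX
 * register, three bits for the 8-word XMM form below); on a little endian
 * host this is roughly:
 *
 *      ((uint16_t *)pu64Dst)[bImm & 3] = u16Src;    // MMX form
 */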
12485
12486/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12487FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12488{
12489 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12491 if (IEM_IS_MODRM_REG_MODE(bRm))
12492 {
12493 /*
12494 * Register, register.
12495 */
12496 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12497 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12499 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12500 IEM_MC_ARG(uint16_t, u16Src, 1);
12501 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12502 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12503 IEM_MC_PREPARE_SSE_USAGE();
12504 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12505 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12506 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12507 IEM_MC_ADVANCE_RIP_AND_FINISH();
12508 IEM_MC_END();
12509 }
12510 else
12511 {
12512 /*
12513 * Register, memory.
12514 */
12515 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12516 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12517 IEM_MC_ARG(uint16_t, u16Src, 1);
12518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12519
12520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12521 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12522 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12524 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12525 IEM_MC_PREPARE_SSE_USAGE();
12526
12527 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12528 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12529 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12530 IEM_MC_ADVANCE_RIP_AND_FINISH();
12531 IEM_MC_END();
12532 }
12533}
12534
12535
12536/* Opcode 0xf3 0x0f 0xc4 - invalid */
12537/* Opcode 0xf2 0x0f 0xc4 - invalid */
12538
12539
12540/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12541FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12542{
12543 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12544 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12545 if (IEM_IS_MODRM_REG_MODE(bRm))
12546 {
12547 /*
12548 * Greg32, MMX, imm8.
12549 */
12550 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12551 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12553 IEM_MC_LOCAL(uint16_t, u16Dst);
12554 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12555 IEM_MC_ARG(uint64_t, u64Src, 1);
12556 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12557 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12558 IEM_MC_PREPARE_FPU_USAGE();
12559 IEM_MC_FPU_TO_MMX_MODE();
12560 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
12561 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
12562 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12563 IEM_MC_ADVANCE_RIP_AND_FINISH();
12564 IEM_MC_END();
12565 }
12566 /* No memory operand. */
12567 else
12568 IEMOP_RAISE_INVALID_OPCODE_RET();
12569}
12570
12571
12572/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12573FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12574{
12575 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12576 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12577 if (IEM_IS_MODRM_REG_MODE(bRm))
12578 {
12579 /*
12580 * Greg32, XMM, imm8.
12581 */
12582 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12583 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12585 IEM_MC_LOCAL(uint16_t, u16Dst);
12586 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12587 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12588 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12589 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12590 IEM_MC_PREPARE_SSE_USAGE();
12591 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12592 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
12593 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12594 IEM_MC_ADVANCE_RIP_AND_FINISH();
12595 IEM_MC_END();
12596 }
12597 /* No memory operand. */
12598 else
12599 IEMOP_RAISE_INVALID_OPCODE_RET();
12600}
12601
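/* Reference sketch (illustrative only): PEXTRW is the inverse selection,
 * zero-extending the chosen word into the 32-bit destination register:
 *
 *      uint16_t const u16Dst = puSrc->au16[bImm & 7];   // XMM form (MMX: & 3)
 *      // the IEM_MC_STORE_GREG_U32 above then zero extends it into the GPR
 */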
12602
12603/* Opcode 0xf3 0x0f 0xc5 - invalid */
12604/* Opcode 0xf2 0x0f 0xc5 - invalid */
12605
12606
12607/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12608FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12609{
12610 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12612 if (IEM_IS_MODRM_REG_MODE(bRm))
12613 {
12614 /*
12615 * XMM, XMM, imm8.
12616 */
12617 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12618 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12620 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12621 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12622 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12623 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12624 IEM_MC_PREPARE_SSE_USAGE();
12625 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12626 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12627 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12628 IEM_MC_ADVANCE_RIP_AND_FINISH();
12629 IEM_MC_END();
12630 }
12631 else
12632 {
12633 /*
12634 * XMM, [mem128], imm8.
12635 */
12636 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12637 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12638 IEM_MC_LOCAL(RTUINT128U, uSrc);
12639 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12641
12642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12643 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12644 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12646 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12647 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12648
12649 IEM_MC_PREPARE_SSE_USAGE();
12650 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12651 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12652
12653 IEM_MC_ADVANCE_RIP_AND_FINISH();
12654 IEM_MC_END();
12655 }
12656}
12657
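/* Reference sketch (illustrative only, names hypothetical): SHUFPS takes the
 * two low result dwords from the destination and the two high ones from the
 * source, each picked by a two-bit imm8 field (the destination is also an
 * input):
 *
 *      uRes.au32[0] = uDstIn.au32[ bImm       & 3];
 *      uRes.au32[1] = uDstIn.au32[(bImm >> 2) & 3];
 *      uRes.au32[2] = uSrcIn.au32[(bImm >> 4) & 3];
 *      uRes.au32[3] = uSrcIn.au32[(bImm >> 6) & 3];
 */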
12658
12659/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12660FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12661{
12662 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12663 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12664 if (IEM_IS_MODRM_REG_MODE(bRm))
12665 {
12666 /*
12667 * XMM, XMM, imm8.
12668 */
12669 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12670 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12672 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12673 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12674 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12675 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12676 IEM_MC_PREPARE_SSE_USAGE();
12677 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12678 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12679 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12680 IEM_MC_ADVANCE_RIP_AND_FINISH();
12681 IEM_MC_END();
12682 }
12683 else
12684 {
12685 /*
12686 * XMM, [mem128], imm8.
12687 */
12688 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12689 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12690 IEM_MC_LOCAL(RTUINT128U, uSrc);
12691 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12692 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12693
12694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12695 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12696 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12698 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12699 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12700
12701 IEM_MC_PREPARE_SSE_USAGE();
12702 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12703 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12704
12705 IEM_MC_ADVANCE_RIP_AND_FINISH();
12706 IEM_MC_END();
12707 }
12708}
12709
12710
12711/* Opcode 0xf3 0x0f 0xc6 - invalid */
12712/* Opcode 0xf2 0x0f 0xc6 - invalid */
12713
12714
12715/**
12716 * @opmaps grp9
12717 * @opcode /1
12718 * @opcodesub !11 mr/reg rex.w=0
12719 * @oppfx n/a
12720 * @opflmodify zf
12721 */
12722FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12723{
12724 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12725#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
12726 IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER, 0); \
12727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12729 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
12730 \
12731 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12732 IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
12733 IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12734 \
12735 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
12736 IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
12737 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
12738 \
12739 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
12740 IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
12741 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
12742 \
12743 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12744 IEM_MC_FETCH_EFLAGS(EFlags); \
12745 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
12746 \
12747 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12748 IEM_MC_COMMIT_EFLAGS(EFlags); \
12749 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12750 IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
12751 } IEM_MC_ENDIF(); \
12752 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12753 \
12754 IEM_MC_END()
12755 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12756 {
12757 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
12758 }
12759 else
12760 {
12761 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
12762 }
12763}
12764
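/* Reference sketch (illustrative only): CMPXCHG8B compares EDX:EAX against the
 * memory operand, ZF being the only flag output:
 *
 *      if (*pu64MemDst == u64EaxEdx.u)
 *      {
 *          *pu64MemDst = u64EbxEcx.u;   // equal: store ECX:EBX, set ZF
 *          fZf = true;
 *      }
 *      else
 *      {
 *          u64EaxEdx.u = *pu64MemDst;   // not equal: load into EDX:EAX, clear ZF
 *          fZf = false;
 *      }
 *
 * This is why the tail of the macro above only writes EDX:EAX back when ZF
 * ends up clear.
 */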
12765
12766/**
12767 * @opmaps grp9
12768 * @opcode /1
12769 * @opcodesub !11 mr/reg rex.w=1
12770 * @oppfx n/a
12771 * @opflmodify zf
12772 */
12773FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12774{
12775 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12776 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12777 {
12778 /*
12779 * This is hairy, very hairy macro fun. We're walking a fine line
12780 * here to make the code parsable by IEMAllInstPython.py and fit into
12781 * the patterns IEMAllThrdPython.py requires for the code morphing.
12782 */
12783#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
12784 IEM_MC_BEGIN(5, 4, IEM_MC_F_64BIT, 0); \
12785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12787 IEMOP_HLP_DONE_DECODING(); \
12788 \
12789 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12790 bUnmapInfoStmt; \
12791 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12792 IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12793 \
12794 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12795 IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
12796 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
12797 \
12798 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12799 IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
12800 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
12801 \
12802 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12803 IEM_MC_FETCH_EFLAGS(EFlags)
12804
12805#define BODY_CMPXCHG16B_TAIL(a_Type) \
12806 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12807 IEM_MC_COMMIT_EFLAGS(EFlags); \
12808 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12809 IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
12810 } IEM_MC_ENDIF(); \
12811 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12812 IEM_MC_END()
12813
12814#ifdef RT_ARCH_AMD64
12815 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12816 {
12817 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12818 {
12819 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12820 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12821 BODY_CMPXCHG16B_TAIL(RW);
12822 }
12823 else
12824 {
12825 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12826 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12827 BODY_CMPXCHG16B_TAIL(ATOMIC);
12828 }
12829 }
12830 else
12831 { /* (see comments in #else case below) */
12832 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12833 {
12834 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12835 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12836 BODY_CMPXCHG16B_TAIL(RW);
12837 }
12838 else
12839 {
12840 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12841 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
12842 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12843 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12844 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
12845 pEFlags, bUnmapInfo);
12846 IEM_MC_END();
12847 }
12848 }
12849
12850#elif defined(RT_ARCH_ARM64)
12851 /** @todo may require fallback for unaligned accesses... */
12852 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12853 {
12854 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12855 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12856 BODY_CMPXCHG16B_TAIL(RW);
12857 }
12858 else
12859 {
12860 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12861 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12862 BODY_CMPXCHG16B_TAIL(ATOMIC);
12863 }
12864
12865#else
12866 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12867 accesses and not at all atomic, which works fine in a UNI CPU guest
12868 configuration (ignoring DMA). If guest SMP is active we have no choice
12869 but to use a rendezvous callback here. Sigh. */
12870 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12871 {
12872 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12873 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12874 BODY_CMPXCHG16B_TAIL(RW);
12875 }
12876 else
12877 {
12878 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12879 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12880 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12881 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12882 iemCImpl_cmpxchg16b_fallback_rendezvous,
12883 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12884 IEM_MC_END();
12885 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12886 }
12887#endif
12888
12889#undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
12890 }
12891 Log(("cmpxchg16b -> #UD\n"));
12892 IEMOP_RAISE_INVALID_OPCODE_RET();
12893}
12894
12895FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12896{
12897 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12898 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12899 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12900}
12901
12902
12903/** Opcode 0x0f 0xc7 11/6. */
12904FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12905{
12906 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12907 IEMOP_RAISE_INVALID_OPCODE_RET();
12908
12909 if (IEM_IS_MODRM_REG_MODE(bRm))
12910 {
12911 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12913 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12914 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12915 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12916 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12917 iemCImpl_rdrand, iReg, enmEffOpSize);
12918 IEM_MC_END();
12919 }
12920 /* Register only. */
12921 else
12922 IEMOP_RAISE_INVALID_OPCODE_RET();
12923}
12924
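/* Reference sketch (illustrative only): per the Intel specification, a
 * successful RDRAND sets CF and the destination register, an unsuccessful one
 * clears both; OF/SF/ZF/AF/PF are always cleared:
 *
 *      if (fOk) { *puDst = uRnd; fEfl |=  X86_EFL_CF; }
 *      else     { *puDst = 0;    fEfl &= ~X86_EFL_CF; }
 *      fEfl &= ~(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
 */
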
12925/** Opcode 0x0f 0xc7 !11/6. */
12926#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12927FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12928{
12929 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12930 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12931 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12932 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12933 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12934 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12935 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12936 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12937 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12938 IEM_MC_END();
12939}
12940#else
12941FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12942#endif
12943
12944/** Opcode 0x66 0x0f 0xc7 !11/6. */
12945#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12946FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12947{
12948 IEMOP_MNEMONIC(vmclear, "vmclear");
12949 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12950 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12951 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12952 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12954 IEMOP_HLP_DONE_DECODING();
12955 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12956 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12957 IEM_MC_END();
12958}
12959#else
12960FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12961#endif
12962
12963/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12964#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12965FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12966{
12967 IEMOP_MNEMONIC(vmxon, "vmxon");
12968 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12969 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12970 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12972 IEMOP_HLP_DONE_DECODING();
12973 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12974 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12975 IEM_MC_END();
12976}
12977#else
12978FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12979#endif
12980
12981/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12982#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12983FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12984{
12985 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12986 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12987 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12988 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12989 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12991 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12992 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12993 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12994 IEM_MC_END();
12995}
12996#else
12997FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12998#endif
12999
13000/** Opcode 0x0f 0xc7 11/7. */
13001FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
13002{
13003 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
13004 IEMOP_RAISE_INVALID_OPCODE_RET();
13005
13006 if (IEM_IS_MODRM_REG_MODE(bRm))
13007 {
13008 /* register destination. */
13009 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
13010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13011 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
13012 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
13013 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
13014 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
13015 iemCImpl_rdseed, iReg, enmEffOpSize);
13016 IEM_MC_END();
13017 }
13018 /* Register only. */
13019 else
13020 IEMOP_RAISE_INVALID_OPCODE_RET();
13021}
13022
13023/**
13024 * Group 9 jump table for register variant.
13025 */
13026IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
13027{ /* pfx: none, 066h, 0f3h, 0f2h */
13028 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
13029 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
13030 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
13031 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
13032 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
13033 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
13034 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
13035 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
13036};
13037AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
13038
13039
13040/**
13041 * Group 9 jump table for memory variant.
13042 */
13043IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
13044{ /* pfx: none, 066h, 0f3h, 0f2h */
13045 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
13046 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
13047 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
13048 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
13049 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
13050 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
13051 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
13052 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
13053};
13054AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
13055
13056
13057/** Opcode 0x0f 0xc7. */
13058FNIEMOP_DEF(iemOp_Grp9)
13059{
13060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13061 if (IEM_IS_MODRM_REG_MODE(bRm))
13062 /* register, register */
13063 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
13064 + pVCpu->iem.s.idxPrefix], bRm);
13065 /* memory, register */
13066 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
13067 + pVCpu->iem.s.idxPrefix], bRm);
13068}
13069
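/* Illustrative note: both group 9 tables above are indexed by the ModR/M reg
 * field combined with the decoded mandatory prefix, i.e. roughly:
 *
 *      pfn = g_apfnGroup9RegReg[IEM_GET_MODRM_REG_8(bRm) * 4 + idxPrefix];
 *      // idxPrefix: 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2
 */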
13070
13071/**
13072 * Common 'bswap register' helper.
13073 */
13074FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
13075{
13076 switch (pVCpu->iem.s.enmEffOpSize)
13077 {
13078 case IEMMODE_16BIT:
13079 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486, 0);
13080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13081 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13082 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
13083 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
13084 IEM_MC_ADVANCE_RIP_AND_FINISH();
13085 IEM_MC_END();
13086 break;
13087
13088 case IEMMODE_32BIT:
13089 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486, 0);
13090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13091 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13092 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
13093 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
13094 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
13095 IEM_MC_ADVANCE_RIP_AND_FINISH();
13096 IEM_MC_END();
13097 break;
13098
13099 case IEMMODE_64BIT:
13100 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
13101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13102 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13103 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
13104 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
13105 IEM_MC_ADVANCE_RIP_AND_FINISH();
13106 IEM_MC_END();
13107 break;
13108
13109 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13110 }
13111}
13112
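/* Reference sketch (illustrative only): the 32-bit byte order reversal done by
 * the iemAImpl_bswap_u32 worker is equivalent to:
 *
 *      uint32_t const u = *pu32Dst;
 *      *pu32Dst = (u << 24)
 *               | ((u & UINT32_C(0x0000ff00)) << 8)
 *               | ((u & UINT32_C(0x00ff0000)) >> 8)
 *               |  (u >> 24);
 *
 * Intel documents the result of the 16-bit form as undefined, which is why
 * the 16-bit case above deliberately leaves the high dword alone.
 */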
13113
13114/** Opcode 0x0f 0xc8. */
13115FNIEMOP_DEF(iemOp_bswap_rAX_r8)
13116{
13117 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
13118 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
13119 prefix. It appears that REX.B is actually the correct prefix. For a
13120 parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
13121 IEMOP_HLP_MIN_486();
13122 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
13123}
13124
13125
13126/** Opcode 0x0f 0xc9. */
13127FNIEMOP_DEF(iemOp_bswap_rCX_r9)
13128{
13129 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
13130 IEMOP_HLP_MIN_486();
13131 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
13132}
13133
13134
13135/** Opcode 0x0f 0xca. */
13136FNIEMOP_DEF(iemOp_bswap_rDX_r10)
13137{
13138 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
13139 IEMOP_HLP_MIN_486();
13140 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
13141}
13142
13143
13144/** Opcode 0x0f 0xcb. */
13145FNIEMOP_DEF(iemOp_bswap_rBX_r11)
13146{
13147 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
13148 IEMOP_HLP_MIN_486();
13149 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
13150}
13151
13152
13153/** Opcode 0x0f 0xcc. */
13154FNIEMOP_DEF(iemOp_bswap_rSP_r12)
13155{
13156 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
13157 IEMOP_HLP_MIN_486();
13158 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
13159}
13160
13161
13162/** Opcode 0x0f 0xcd. */
13163FNIEMOP_DEF(iemOp_bswap_rBP_r13)
13164{
13165 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
13166 IEMOP_HLP_MIN_486();
13167 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
13168}
13169
13170
13171/** Opcode 0x0f 0xce. */
13172FNIEMOP_DEF(iemOp_bswap_rSI_r14)
13173{
13174 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
13175 IEMOP_HLP_MIN_486();
13176 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
13177}
13178
13179
13180/** Opcode 0x0f 0xcf. */
13181FNIEMOP_DEF(iemOp_bswap_rDI_r15)
13182{
13183 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
13184 IEMOP_HLP_MIN_486();
13185 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
13186}
13187
13188
13189/* Opcode 0x0f 0xd0 - invalid */
13190
13191
13192/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
13193FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
13194{
13195 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13196 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
13197}
13198
13199
13200/* Opcode 0xf3 0x0f 0xd0 - invalid */
13201
13202
13203/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
13204FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
13205{
13206 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13207 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
13208}
13209
13210
13211
13212/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
13213FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
13214{
13215 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13216 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
13217}
13218
13219/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
13220FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
13221{
13222 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13223 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
13224}
13225
13226/* Opcode 0xf3 0x0f 0xd1 - invalid */
13227/* Opcode 0xf2 0x0f 0xd1 - invalid */
13228
13229/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
13230FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
13231{
13232 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13233 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
13234}
13235
13236
13237/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
13238FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
13239{
13240 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13241 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
13242}
13243
13244
13245/* Opcode 0xf3 0x0f 0xd2 - invalid */
13246/* Opcode 0xf2 0x0f 0xd2 - invalid */
13247
13248/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
13249FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
13250{
13251 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13252 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
13253}
13254
13255
13256/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
13257FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
13258{
13259 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13260 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
13261}
13262
13263
13264/* Opcode 0xf3 0x0f 0xd3 - invalid */
13265/* Opcode 0xf2 0x0f 0xd3 - invalid */
13266
13267
13268/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
13269FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
13270{
13271 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13272 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
13273}
13274
13275
13276/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
13277FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
13278{
13279 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13280 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
13281}
13282
13283
13284/* Opcode 0xf3 0x0f 0xd4 - invalid */
13285/* Opcode 0xf2 0x0f 0xd4 - invalid */
13286
13287/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
13288FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
13289{
13290 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13291 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
13292}
13293
13294/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
13295FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
13296{
13297 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13298 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
13299}
13300
13301
13302/* Opcode 0xf3 0x0f 0xd5 - invalid */
13303/* Opcode 0xf2 0x0f 0xd5 - invalid */
13304
13305/* Opcode 0x0f 0xd6 - invalid */
13306
13307/**
13308 * @opcode 0xd6
13309 * @oppfx 0x66
13310 * @opcpuid sse2
13311 * @opgroup og_sse2_pcksclr_datamove
13312 * @opxcpttype none
13313 * @optest op1=-1 op2=2 -> op1=2
13314 * @optest op1=0 op2=-42 -> op1=-42
13315 */
13316FNIEMOP_DEF(iemOp_movq_Wq_Vq)
13317{
13318 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13320 if (IEM_IS_MODRM_REG_MODE(bRm))
13321 {
13322 /*
13323 * Register, register.
13324 */
13325 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13327 IEM_MC_LOCAL(uint64_t, uSrc);
13328
13329 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13330 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13331
13332 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
13333 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
13334
13335 IEM_MC_ADVANCE_RIP_AND_FINISH();
13336 IEM_MC_END();
13337 }
13338 else
13339 {
13340 /*
13341 * Memory, register.
13342 */
13343 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13344 IEM_MC_LOCAL(uint64_t, uSrc);
13345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13346
13347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13349 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13350 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13351
13352 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
13353 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13354
13355 IEM_MC_ADVANCE_RIP_AND_FINISH();
13356 IEM_MC_END();
13357 }
13358}
13359
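/* Reference sketch (illustrative only): for the register form of movq the ZX
 * in IEM_MC_STORE_XREG_U64_ZX_U128 means the upper quadword of the destination
 * is zeroed:
 *
 *      pDst->au64[0] = uSrc;
 *      pDst->au64[1] = 0;
 *
 * The memory form is a plain 64-bit store without any zero extension.
 */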
13360
13361/**
13362 * @opcode 0xd6
13363 * @opcodesub 11 mr/reg
13364 * @oppfx f3
13365 * @opcpuid sse2
13366 * @opgroup og_sse2_simdint_datamove
13367 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13368 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13369 */
13370FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
13371{
13372 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13373 if (IEM_IS_MODRM_REG_MODE(bRm))
13374 {
13375 /*
13376 * Register, register.
13377 */
13378 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13379 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
13380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13381 IEM_MC_LOCAL(uint64_t, uSrc);
13382
13383 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13384 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13385 IEM_MC_FPU_TO_MMX_MODE();
13386
13387 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13388 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13389
13390 IEM_MC_ADVANCE_RIP_AND_FINISH();
13391 IEM_MC_END();
13392 }
13393
13394 /**
13395 * @opdone
13396 * @opmnemonic udf30fd6mem
13397 * @opcode 0xd6
13398 * @opcodesub !11 mr/reg
13399 * @oppfx f3
13400 * @opunused intel-modrm
13401 * @opcpuid sse
13402 * @optest ->
13403 */
13404 else
13405 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13406}
13407
13408
13409/**
13410 * @opcode 0xd6
13411 * @opcodesub 11 mr/reg
13412 * @oppfx f2
13413 * @opcpuid sse2
13414 * @opgroup og_sse2_simdint_datamove
13415 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13416 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13417 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13418 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13419 * @optest op1=-42 op2=0xfedcba9876543210
13420 * -> op1=0xfedcba9876543210 ftw=0xff
13421 */
13422FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13423{
13424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13425 if (IEM_IS_MODRM_REG_MODE(bRm))
13426 {
13427 /*
13428 * Register, register.
13429 */
13430 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13431 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
13432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13433 IEM_MC_LOCAL(uint64_t, uSrc);
13434
13435 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13436 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13437 IEM_MC_FPU_TO_MMX_MODE();
13438
13439 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13440 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13441
13442 IEM_MC_ADVANCE_RIP_AND_FINISH();
13443 IEM_MC_END();
13444 }
13445
13446 /**
13447 * @opdone
13448 * @opmnemonic udf20fd6mem
13449 * @opcode 0xd6
13450 * @opcodesub !11 mr/reg
13451 * @oppfx f2
13452 * @opunused intel-modrm
13453 * @opcpuid sse
13454 * @optest ->
13455 */
13456 else
13457 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13458}
13459
13460
13461/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13462FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13463{
13464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13465 /* Docs say register only. */
13466 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13467 {
13468 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13469 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
13470 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
13471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13472 IEM_MC_ARG(uint64_t *, puDst, 0);
13473 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13474 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13475 IEM_MC_PREPARE_FPU_USAGE();
13476 IEM_MC_FPU_TO_MMX_MODE();
13477
13478 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13479 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13480 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13481
13482 IEM_MC_ADVANCE_RIP_AND_FINISH();
13483 IEM_MC_END();
13484 }
13485 else
13486 IEMOP_RAISE_INVALID_OPCODE_RET();
13487}
13488
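/* Reference sketch (illustrative only): PMOVMSKB gathers the most significant
 * bit of every source byte into the low bits of the destination GPR and zeroes
 * the rest; for the 64-bit MMX form:
 *
 *      uint64_t uRes = 0;
 *      for (unsigned i = 0; i < 8; i++)
 *          uRes |= (uint64_t)((*puSrc >> (i * 8 + 7)) & 1) << i;
 *      *puDst = uRes;   // the SSE form below does the same for 16 bytes
 */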
13489
13490/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
13491FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13492{
13493 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13494 /* Docs say register only. */
13495 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13496 {
13497 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13498 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13499 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
13500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13501 IEM_MC_ARG(uint64_t *, puDst, 0);
13502 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13503 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13504 IEM_MC_PREPARE_SSE_USAGE();
13505 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13506 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13507 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13508 IEM_MC_ADVANCE_RIP_AND_FINISH();
13509 IEM_MC_END();
13510 }
13511 else
13512 IEMOP_RAISE_INVALID_OPCODE_RET();
13513}
13514
13515
13516/* Opcode 0xf3 0x0f 0xd7 - invalid */
13517/* Opcode 0xf2 0x0f 0xd7 - invalid */
13518
13519
13520/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13521FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13522{
13523 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13524 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
13525}
13526
13527
13528/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13529FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13530{
13531 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13532 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
13533}
13534
13535
13536/* Opcode 0xf3 0x0f 0xd8 - invalid */
13537/* Opcode 0xf2 0x0f 0xd8 - invalid */
13538
13539/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13540FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13541{
13542 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13543 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
13544}
13545
13546
13547/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13548FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13549{
13550 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13551 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
13552}
13553
13554
13555/* Opcode 0xf3 0x0f 0xd9 - invalid */
13556/* Opcode 0xf2 0x0f 0xd9 - invalid */
13557
13558/** Opcode 0x0f 0xda - pminub Pq, Qq */
13559FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13560{
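    /* Note! pminub is one of the instructions SSE added to the MMX register
       file, so the iemOpCommonMmxSse worker is presumably gated on either
       CPUID.SSE or the AMD MMX extensions, like the pmovmskb decoding above. */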
13561 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13562 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
13563}
13564
13565
13566/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13567FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13568{
13569 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13570 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
13571}
13572
13573/* Opcode 0xf3 0x0f 0xda - invalid */
13574/* Opcode 0xf2 0x0f 0xda - invalid */
13575
13576/** Opcode 0x0f 0xdb - pand Pq, Qq */
13577FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13578{
13579 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13580 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
13581}
13582
13583
13584/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13585FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13586{
13587 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13588 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
13589}
13590
13591
13592/* Opcode 0xf3 0x0f 0xdb - invalid */
13593/* Opcode 0xf2 0x0f 0xdb - invalid */
13594
13595/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13596FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13597{
13598 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13599 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
13600}
13601
13602
13603/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13604FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13605{
13606 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13607 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
13608}
13609
13610
13611/* Opcode 0xf3 0x0f 0xdc - invalid */
13612/* Opcode 0xf2 0x0f 0xdc - invalid */
13613
13614/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13615FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13616{
13617 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13618 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
13619}
13620
13621
13622/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13623FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13624{
13625 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13626 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
13627}
13628
13629
13630/* Opcode 0xf3 0x0f 0xdd - invalid */
13631/* Opcode 0xf2 0x0f 0xdd - invalid */
13632
13633/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13634FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13635{
13636 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13637 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
13638}
13639
13640
13641/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13642FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13643{
13644 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13645 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
13646}
13647
13648/* Opcode 0xf3 0x0f 0xde - invalid */
13649/* Opcode 0xf2 0x0f 0xde - invalid */
13650
13651
13652/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13653FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13654{
13655 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13656 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
13657}
13658
13659
13660/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13661FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13662{
13663 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13664 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
13665}
13666
13667
13668/* Opcode 0xf3 0x0f 0xdf - invalid */
13669/* Opcode 0xf2 0x0f 0xdf - invalid */
13670
13671/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13672FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13673{
13674 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13675 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13676}
13677
13678
13679/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13680FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13681{
13682 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13683 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13684}
13685
13686
13687/* Opcode 0xf3 0x0f 0xe0 - invalid */
13688/* Opcode 0xf2 0x0f 0xe0 - invalid */
13689
13690/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13691FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13692{
13693 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13694 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13695}
13696
13697
13698/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13699FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13700{
13701 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13702 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13703}
13704
13705
13706/* Opcode 0xf3 0x0f 0xe1 - invalid */
13707/* Opcode 0xf2 0x0f 0xe1 - invalid */
13708
13709/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13710FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13711{
13712 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13713 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13714}
13715
13716
13717/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13718FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13719{
13720 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13721 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13722}
13723
13724
13725/* Opcode 0xf3 0x0f 0xe2 - invalid */
13726/* Opcode 0xf2 0x0f 0xe2 - invalid */
13727
13728/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13729FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13730{
13731 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13732 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13733}
13734
13735
13736/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13737FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13738{
13739 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13740 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13741}
13742
13743
13744/* Opcode 0xf3 0x0f 0xe3 - invalid */
13745/* Opcode 0xf2 0x0f 0xe3 - invalid */
13746
13747/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13748FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13749{
13750 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13751 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13752}
13753
13754
13755/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13756FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13757{
13758 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13759 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13760}
13761
13762
13763/* Opcode 0xf3 0x0f 0xe4 - invalid */
13764/* Opcode 0xf2 0x0f 0xe4 - invalid */
13765
13766/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13767FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13768{
13769 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13770 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13771}
13772
13773
13774/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13775FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13776{
13777 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13778 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13779}
13780
13781
13782/* Opcode 0xf3 0x0f 0xe5 - invalid */
13783/* Opcode 0xf2 0x0f 0xe5 - invalid */
13784/* Opcode 0x0f 0xe6 - invalid */
13785
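/* The three implemented 0xe6 forms are all conversions: 0x66 = cvttpd2dq
   (double -> int32, truncating), 0xf3 = cvtdq2pd (int32 -> double, widening),
   and 0xf2 = cvtpd2dq (double -> int32, rounded according to MXCSR.RC). */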
13786
13787/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13788FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13789{
13790 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13791 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13792}
13793
13794
13795/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13796FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13797{
13798 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13799 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13800}
13801
13802
13803/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13804FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13805{
13806 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13807 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13808}
13809
13810
13811/**
13812 * @opcode 0xe7
13813 * @opcodesub !11 mr/reg
13814 * @oppfx none
13815 * @opcpuid sse
13816 * @opgroup og_sse1_cachect
13817 * @opxcpttype none
13818 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13819 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13820 */
13821FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13822{
13823 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13824 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13825 if (IEM_IS_MODRM_MEM_MODE(bRm))
13826 {
13827 /* Register, memory. */
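    /* Note! movntq is only a non-temporal (cache bypass) hint; the hint has no
       architectural side effect, so emulating it as a plain store suffices. */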
13828 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13829 IEM_MC_LOCAL(uint64_t, uSrc);
13830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13831
13832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13834 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13835 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13836 IEM_MC_FPU_TO_MMX_MODE();
13837
13838 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13839 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13840
13841 IEM_MC_ADVANCE_RIP_AND_FINISH();
13842 IEM_MC_END();
13843 }
13844 /**
13845 * @opdone
13846 * @opmnemonic ud0fe7reg
13847 * @opcode 0xe7
13848 * @opcodesub 11 mr/reg
13849 * @oppfx none
13850 * @opunused immediate
13851 * @opcpuid sse
13852 * @optest ->
13853 */
13854 else
13855 IEMOP_RAISE_INVALID_OPCODE_RET();
13856}
13857
13858/**
13859 * @opcode 0xe7
13860 * @opcodesub !11 mr/reg
13861 * @oppfx 0x66
13862 * @opcpuid sse2
13863 * @opgroup og_sse2_cachect
13864 * @opxcpttype 1
13865 * @optest op1=-1 op2=2 -> op1=2
13866 * @optest op1=0 op2=-42 -> op1=-42
13867 */
13868FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13869{
13870 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13872 if (IEM_IS_MODRM_MEM_MODE(bRm))
13873 {
13874 /* Register, memory. */
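    /* Note! Unlike movdqu, movntdq requires a 16 byte aligned operand; the
       _ALIGN_SSE store below supplies the misalignment check. */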
13875 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13876 IEM_MC_LOCAL(RTUINT128U, uSrc);
13877 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13878
13879 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13881 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13882 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13883
13884 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13885 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13886
13887 IEM_MC_ADVANCE_RIP_AND_FINISH();
13888 IEM_MC_END();
13889 }
13890
13891 /**
13892 * @opdone
13893 * @opmnemonic ud660fe7reg
13894 * @opcode 0xe7
13895 * @opcodesub 11 mr/reg
13896 * @oppfx 0x66
13897 * @opunused immediate
13898 * @opcpuid sse
13899 * @optest ->
13900 */
13901 else
13902 IEMOP_RAISE_INVALID_OPCODE_RET();
13903}
13904
13905/* Opcode 0xf3 0x0f 0xe7 - invalid */
13906/* Opcode 0xf2 0x0f 0xe7 - invalid */
13907
13908
13909/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13910FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13911{
13912 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13913 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
13914}
13915
13916
13917/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13918FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13919{
13920 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13921 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
13922}
13923
13924
13925/* Opcode 0xf3 0x0f 0xe8 - invalid */
13926/* Opcode 0xf2 0x0f 0xe8 - invalid */
13927
13928/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13929FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13930{
13931 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13932 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
13933}
13934
13935
13936/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13937FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13938{
13939 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13940 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
13941}
13942
13943
13944/* Opcode 0xf3 0x0f 0xe9 - invalid */
13945/* Opcode 0xf2 0x0f 0xe9 - invalid */
13946
13947
13948/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13949FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13950{
13951 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13952 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
13953}
13954
13955
13956/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13957FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13958{
13959 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13960 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
13961}
13962
13963
13964/* Opcode 0xf3 0x0f 0xea - invalid */
13965/* Opcode 0xf2 0x0f 0xea - invalid */
13966
13967
13968/** Opcode 0x0f 0xeb - por Pq, Qq */
13969FNIEMOP_DEF(iemOp_por_Pq_Qq)
13970{
13971 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13972 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
13973}
13974
13975
13976/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13977FNIEMOP_DEF(iemOp_por_Vx_Wx)
13978{
13979 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13980 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
13981}
13982
13983
13984/* Opcode 0xf3 0x0f 0xeb - invalid */
13985/* Opcode 0xf2 0x0f 0xeb - invalid */
13986
13987/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13988FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13989{
13990 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13991 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
13992}
13993
13994
13995/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13996FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13997{
13998 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13999 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
14000}
14001
14002
14003/* Opcode 0xf3 0x0f 0xec - invalid */
14004/* Opcode 0xf2 0x0f 0xec - invalid */
14005
14006/** Opcode 0x0f 0xed - paddsw Pq, Qq */
14007FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
14008{
14009 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14010 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
14011}
14012
14013
14014/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
14015FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
14016{
14017 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14018 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
14019}
14020
14021
14022/* Opcode 0xf3 0x0f 0xed - invalid */
14023/* Opcode 0xf2 0x0f 0xed - invalid */
14024
14025
14026/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
14027FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
14028{
14029 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14030 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
14031}
14032
14033
14034/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
14035FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
14036{
14037 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14038 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
14039}
14040
14041
14042/* Opcode 0xf3 0x0f 0xee - invalid */
14043/* Opcode 0xf2 0x0f 0xee - invalid */
14044
14045
14046/** Opcode 0x0f 0xef - pxor Pq, Qq */
14047FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
14048{
14049 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14050 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
14051}
14052
14053
14054/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
14055FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
14056{
14057 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14058 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
14059}
14060
14061
14062/* Opcode 0xf3 0x0f 0xef - invalid */
14063/* Opcode 0xf2 0x0f 0xef - invalid */
14064
14065/* Opcode 0x0f 0xf0 - invalid */
14066/* Opcode 0x66 0x0f 0xf0 - invalid */
14067
14068
14069/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
14070FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
14071{
14072 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14074 if (IEM_IS_MODRM_REG_MODE(bRm))
14075 {
14076 /*
14077 * Register, register - (not implemented, assuming it raises \#UD).
14078 */
14079 IEMOP_RAISE_INVALID_OPCODE_RET();
14080 }
14081 else
14082 {
14083 /*
14084 * Register, memory.
14085 */
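    /* Note! lddqu exists precisely to allow unaligned 128-bit loads without
       faulting, hence the _NO_AC (no alignment check) fetch below. */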
14086 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
14087 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
14088 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14089
14090 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
14092 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
14093 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
14094 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14095 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
14096
14097 IEM_MC_ADVANCE_RIP_AND_FINISH();
14098 IEM_MC_END();
14099 }
14100}
14101
14102
14103/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
14104FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
14105{
14106 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
14107 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
14108}
14109
14110
14111/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
14112FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
14113{
14114 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
14115 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
14116}
14117
14118
14119/* Opcode 0xf2 0x0f 0xf1 - invalid */
14120
14121/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
14122FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
14123{
14124 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
14125 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
14126}
14127
14128
14129/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
14130FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
14131{
14132 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
14133 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
14134}
14135
14136
14137/* Opcode 0xf2 0x0f 0xf2 - invalid */
14138
14139/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
14140FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
14141{
14142 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
14143 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
14144}
14145
14146
14147/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
14148FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
14149{
14150 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
14151 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
14152}
14153
14154/* Opcode 0xf2 0x0f 0xf3 - invalid */
14155
14156/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
14157FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
14158{
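    /* pmuludq (MMX form): dst.u64 = (uint64_t)dst.u32[0] * src.u32[0]. */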
14159 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
14160 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
14161}
14162
14163
14164/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
14165FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
14166{
14167 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
14168 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
14169}
14170
14171
14172/* Opcode 0xf2 0x0f 0xf4 - invalid */
14173
14174/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
14175FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
14176{
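    /* pmaddwd: dst.i32[n] = dst.i16[2n]*src.i16[2n] + dst.i16[2n+1]*src.i16[2n+1]. */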
14177 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
14178 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
14179}
14180
14181
14182/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
14183FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
14184{
14185 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
14186 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
14187}
14188
14189/* Opcode 0xf2 0x0f 0xf5 - invalid */
14190
14191/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
14192FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
14193{
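    /* psadbw (MMX form): word 0 = sum of |dst.u8[i] - src.u8[i]| for i=0..7,
       the remaining words are zeroed. */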
14194 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14195 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
14196}
14197
14198
14199/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
14200FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
14201{
14202 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14203 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
14204}
14205
14206
14207/* Opcode 0xf2 0x0f 0xf6 - invalid */
14208
14209/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
14210FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
14211/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
14212FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
14213/* Opcode 0xf2 0x0f 0xf7 - invalid */
14214
14215
14216/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
14217FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
14218{
14219 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14220 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
14221}
14222
14223
14224/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
14225FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
14226{
14227 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14228 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
14229}
14230
14231
14232/* Opcode 0xf2 0x0f 0xf8 - invalid */
14233
14234
14235/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
14236FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
14237{
14238 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14239 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
14240}
14241
14242
14243/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
14244FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
14245{
14246 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14247 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
14248}
14249
14250
14251/* Opcode 0xf2 0x0f 0xf9 - invalid */
14252
14253
14254/** Opcode 0x0f 0xfa - psubd Pq, Qq */
14255FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
14256{
14257 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14258 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
14259}
14260
14261
14262/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
14263FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
14264{
14265 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14266 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
14267}
14268
14269
14270/* Opcode 0xf2 0x0f 0xfa - invalid */
14271
14272
14273/** Opcode 0x0f 0xfb - psubq Pq, Qq */
14274FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
14275{
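    /* Note! psubq was added with SSE2 even in its MMX form, hence the _Sse2
       variant of the common MMX decoding worker. */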
14276 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14277 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
14278}
14279
14280
14281/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
14282FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
14283{
14284 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14285 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
14286}
14287
14288
14289/* Opcode 0xf2 0x0f 0xfb - invalid */
14290
14291
14292/** Opcode 0x0f 0xfc - paddb Pq, Qq */
14293FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
14294{
14295 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14296 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
14297}
14298
14299
14300/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
14301FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
14302{
14303 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14304 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
14305}
14306
14307
14308/* Opcode 0xf2 0x0f 0xfc - invalid */
14309
14310
14311/** Opcode 0x0f 0xfd - paddw Pq, Qq */
14312FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
14313{
14314 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14315 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
14316}
14317
14318
14319/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
14320FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
14321{
14322 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14323 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
14324}
14325
14326
14327/* Opcode 0xf2 0x0f 0xfd - invalid */
14328
14329
14330/** Opcode 0x0f 0xfe - paddd Pq, Qq */
14331FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
14332{
14333 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14334 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
14335}
14336
14337
14338/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
14339FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
14340{
14341 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14342 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
14343}
14344
14345
14346/* Opcode 0xf2 0x0f 0xfe - invalid */
14347
14348
14349/** Opcode **** 0x0f 0xff - UD0 */
14350FNIEMOP_DEF(iemOp_ud0)
14351{
14352 IEMOP_MNEMONIC(ud0, "ud0");
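    /* Note! Intel documents UD0 with a ModR/M byte, so the operand bytes are
       consumed before raising \#UD on Intel; for other vendors the emulation
       raises it on the opcode alone, hence the vendor check. */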
14353 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
14354 {
14355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
14356 if (IEM_IS_MODRM_MEM_MODE(bRm))
14357 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
14358 }
14359 IEMOP_HLP_DONE_DECODING();
14360 IEMOP_RAISE_INVALID_OPCODE_RET();
14361}
14362
14363
14364
14365/**
14366 * Two byte opcode map, first byte 0x0f.
14367 *
14368 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
14369 * check if it needs updating as well when making changes.
14370 */
14371const PFNIEMOP g_apfnTwoByteMap[] =
14372{
14373 /* no prefix, 066h prefix f3h prefix, f2h prefix */
14374 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
14375 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
14376 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
14377 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
14378 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
14379 /* 0x05 */ IEMOP_X4(iemOp_syscall),
14380 /* 0x06 */ IEMOP_X4(iemOp_clts),
14381 /* 0x07 */ IEMOP_X4(iemOp_sysret),
14382 /* 0x08 */ IEMOP_X4(iemOp_invd),
14383 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
14384 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
14385 /* 0x0b */ IEMOP_X4(iemOp_ud2),
14386 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
14387 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
14388 /* 0x0e */ IEMOP_X4(iemOp_femms),
14389 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
14390
14391 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
14392 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
14393 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
14394 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14395 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14396 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14397 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
14398 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14399 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
14400 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
14401 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
14402 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
14403 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
14404 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
14405 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
14406 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
14407
14408 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
14409 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
14410 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
14411 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
14412 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
14413 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14414 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
14415 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14416 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14417 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14418 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
14419 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14420 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
14421 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
14422 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14423 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14424
14425 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
14426 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
14427 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
14428 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
14429 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
14430 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
14431 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
14432 /* 0x37 */ IEMOP_X4(iemOp_getsec),
14433 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
14434 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14435 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
14436 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14437 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14438 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14439 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14440 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14441
14442 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
14443 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
14444 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
14445 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
14446 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
14447 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
14448 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
14449 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
14450 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
14451 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
14452 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
14453 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
14454 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
14455 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
14456 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
14457 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
14458
14459 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14460 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
14461 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
14462 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
14463 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14464 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14465 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14466 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14467 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
14468 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
14469 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
14470 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
14471 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
14472 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
14473 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
14474 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
14475
14476 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14477 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14478 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14479 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14480 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14481 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14482 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14483 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14484 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14485 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14486 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14487 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14488 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14489 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14490 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14491 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
14492
14493 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
14494 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
14495 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
14496 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
14497 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14498 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14499 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14500 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14501
14502 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14503 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14504 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14505 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14506 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
14507 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
14508 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
14509 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
14510
14511 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
14512 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
14513 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
14514 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
14515 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14516 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14517 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14518 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14519 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14520 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14521 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14522 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14523 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14524 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14525 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14526 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14527
14528 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14529 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14530 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14531 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14532 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14533 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14534 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14535 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14536 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14537 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14538 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14539 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14540 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14541 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14542 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14543 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14544
14545 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14546 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14547 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14548 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14549 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14550 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14551 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14552 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14553 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14554 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14555 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14556 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14557 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14558 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14559 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14560 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14561
14562 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14563 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14564 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14565 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14566 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14567 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14568 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14569 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14570 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14571 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14572 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14573 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14574 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14575 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14576 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14577 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14578
14579 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14580 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14581 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14582 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14583 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14584 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14585 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14586 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14587 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14588 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14589 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14590 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14591 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14592 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14593 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14594 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14595
14596 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14597 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14598 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14599 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14600 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14601 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14602 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14603 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14604 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14605 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14606 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14607 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14608 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14609 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14610 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14611 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14612
14613 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14614 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14615 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14616 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14617 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14618 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14619 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14620 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14621 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14622 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14623 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14624 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14625 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14626 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14627 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14628 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14629
14630 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14631 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14632 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14633 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14634 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14635 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14636 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14637 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14638 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14639 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14640 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14641 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14642 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14643 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14644 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14645 /* 0xff */ IEMOP_X4(iemOp_ud0),
14646};
14647AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
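/* Illustrative sketch of the assumed table lookup (the decoder loop owns the
   real code): each opcode byte has four consecutive slots, one per mandatory
   prefix column (none, 0x66, 0xf3, 0xf2), giving 256 * 4 = 1024 entries:
       pfn = g_apfnTwoByteMap[((uintptr_t)bOpcode << 2) + idxPrefix]; */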
14648
14649/** @} */
14650