VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@ 101626

Last change on this file since 101626 was 101484, checked in by vboxsync, 14 months ago

VMM/IEM: Basic register allocator sketches that incorporate simple skipping of guest register value loads. Sketched out variable and argument management. Started telling GDB about our jitted code to help with backtraces. ++ bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 518.1 KB
Line 
1/* $Id: IEMAllInstTwoByte0f.cpp.h 101484 2023-10-18 01:32:17Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name Two byte opcodes (first byte 0x0f).
33 *
34 * @{
35 */
36
37
38/**
39 * Common worker for MMX instructions on the form:
40 * pxxx mm1, mm2/mem64
41 */
42FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
43{
44 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
45 if (IEM_IS_MODRM_REG_MODE(bRm))
46 {
47 /*
48 * MMX, MMX.
49 */
50 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
51 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
52 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
53 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
54 IEM_MC_ARG(uint64_t *, pDst, 0);
55 IEM_MC_ARG(uint64_t const *, pSrc, 1);
56 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
57 IEM_MC_PREPARE_FPU_USAGE();
58 IEM_MC_FPU_TO_MMX_MODE(); /* Switch the FPU into MMX mode before any MM register is referenced. */
59
60 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
61 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
62 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc); /* MMX AIMPL call: worker also gets FXSAVE state (contrast the Opt variant). */
63 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
64
65 IEM_MC_ADVANCE_RIP_AND_FINISH();
66 IEM_MC_END();
67 }
68 else
69 {
70 /*
71 * MMX, [mem64].
72 */
73 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
74 IEM_MC_ARG(uint64_t *, pDst, 0);
75 IEM_MC_LOCAL(uint64_t, uSrc);
76 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
77 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
78
79 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
80 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
81 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
82 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
83
84 IEM_MC_PREPARE_FPU_USAGE();
85 IEM_MC_FPU_TO_MMX_MODE();
86
87 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
88 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
89 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
90
91 IEM_MC_ADVANCE_RIP_AND_FINISH();
92 IEM_MC_END();
93 }
94}
95
96
97/**
98 * Common worker for MMX instructions on the form:
99 * pxxx mm1, mm2/mem64
100 *
101 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
102 * no FXSAVE state, just the operands.
103 */
104FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
105{
106 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
107 if (IEM_IS_MODRM_REG_MODE(bRm))
108 {
109 /*
110 * MMX, MMX.
111 */
112 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
113 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
114 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
116 IEM_MC_ARG(uint64_t *, pDst, 0);
117 IEM_MC_ARG(uint64_t const *, pSrc, 1);
118 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
119 IEM_MC_PREPARE_FPU_USAGE();
120 IEM_MC_FPU_TO_MMX_MODE(); /* Switch the FPU into MMX mode before any MM register is referenced. */
121
122 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
123 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
124 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc); /* Plain call: worker gets no FXSAVE state, just the two operands. */
125 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
126
127 IEM_MC_ADVANCE_RIP_AND_FINISH();
128 IEM_MC_END();
129 }
130 else
131 {
132 /*
133 * MMX, [mem64].
134 */
135 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
136 IEM_MC_ARG(uint64_t *, pDst, 0);
137 IEM_MC_LOCAL(uint64_t, uSrc);
138 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
140
141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
143 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
144 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
145
146 IEM_MC_PREPARE_FPU_USAGE();
147 IEM_MC_FPU_TO_MMX_MODE();
148
149 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
150 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
151 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
152
153 IEM_MC_ADVANCE_RIP_AND_FINISH();
154 IEM_MC_END();
155 }
156}
157
158
159/**
160 * Common worker for MMX instructions on the form:
161 * pxxx mm1, mm2/mem64
162 * for instructions introduced with SSE.
163 */
164FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
165{
166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
167 if (IEM_IS_MODRM_REG_MODE(bRm))
168 {
169 /*
170 * MMX, MMX.
171 */
172 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
173 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
174 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts); /* Available with SSE or the AMD MMX extensions. */
176 IEM_MC_ARG(uint64_t *, pDst, 0);
177 IEM_MC_ARG(uint64_t const *, pSrc, 1);
178 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
179 IEM_MC_PREPARE_FPU_USAGE();
180 IEM_MC_FPU_TO_MMX_MODE(); /* Switch the FPU into MMX mode before any MM register is referenced. */
181
182 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
183 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
184 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc); /* MMX AIMPL call: worker also gets FXSAVE state (contrast the Opt variant). */
185 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
186
187 IEM_MC_ADVANCE_RIP_AND_FINISH();
188 IEM_MC_END();
189 }
190 else
191 {
192 /*
193 * MMX, [mem64].
194 */
195 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
196 IEM_MC_ARG(uint64_t *, pDst, 0);
197 IEM_MC_LOCAL(uint64_t, uSrc);
198 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
200
201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
203 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
204 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
205
206 IEM_MC_PREPARE_FPU_USAGE();
207 IEM_MC_FPU_TO_MMX_MODE();
208
209 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
210 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
211 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
212
213 IEM_MC_ADVANCE_RIP_AND_FINISH();
214 IEM_MC_END();
215 }
216}
217
218
219/**
220 * Common worker for MMX instructions on the form:
221 * pxxx mm1, mm2/mem64
222 * for instructions introduced with SSE.
223 *
224 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
225 * no FXSAVE state, just the operands.
226 */
227FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
228{
229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
230 if (IEM_IS_MODRM_REG_MODE(bRm))
231 {
232 /*
233 * MMX, MMX.
234 */
235 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
236 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
237 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts); /* Available with SSE or the AMD MMX extensions. */
239 IEM_MC_ARG(uint64_t *, pDst, 0);
240 IEM_MC_ARG(uint64_t const *, pSrc, 1);
241 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
242 IEM_MC_PREPARE_FPU_USAGE();
243 IEM_MC_FPU_TO_MMX_MODE(); /* Switch the FPU into MMX mode before any MM register is referenced. */
244
245 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
246 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
247 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc); /* Plain call: worker gets no FXSAVE state, just the two operands. */
248 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
249
250 IEM_MC_ADVANCE_RIP_AND_FINISH();
251 IEM_MC_END();
252 }
253 else
254 {
255 /*
256 * MMX, [mem64].
257 */
258 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
259 IEM_MC_ARG(uint64_t *, pDst, 0);
260 IEM_MC_LOCAL(uint64_t, uSrc);
261 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
263
264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
266 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
267 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
268
269 IEM_MC_PREPARE_FPU_USAGE();
270 IEM_MC_FPU_TO_MMX_MODE();
271
272 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
273 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
274 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
275
276 IEM_MC_ADVANCE_RIP_AND_FINISH();
277 IEM_MC_END();
278 }
279}
280
281
282/**
283 * Common worker for MMX instructions on the form:
284 * pxxx mm1, mm2/mem64
285 * that was introduced with SSE2.
286 */
287FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAF2U64, pfnU64)
288{
289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
290 if (IEM_IS_MODRM_REG_MODE(bRm))
291 {
292 /*
293 * MMX, MMX.
294 */
295 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
296 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
297 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* SSE2 cpuid check — MMX-form instruction introduced with SSE2. */
299 IEM_MC_ARG(uint64_t *, pDst, 0);
300 IEM_MC_ARG(uint64_t const *, pSrc, 1);
301 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
302 IEM_MC_PREPARE_FPU_USAGE();
303 IEM_MC_FPU_TO_MMX_MODE(); /* Switch the FPU into MMX mode before any MM register is referenced. */
304
305 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
306 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
307 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc); /* MMX AIMPL call: worker also gets FXSAVE state. */
308 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
309
310 IEM_MC_ADVANCE_RIP_AND_FINISH();
311 IEM_MC_END();
312 }
313 else
314 {
315 /*
316 * MMX, [mem64].
317 */
318 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
319 IEM_MC_ARG(uint64_t *, pDst, 0);
320 IEM_MC_LOCAL(uint64_t, uSrc);
321 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
323
324 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
326 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
327 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
328
329 IEM_MC_PREPARE_FPU_USAGE();
330 IEM_MC_FPU_TO_MMX_MODE();
331
332 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
333 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
334 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
335
336 IEM_MC_ADVANCE_RIP_AND_FINISH();
337 IEM_MC_END();
338 }
339}
340
341
342/**
343 * Common worker for SSE instructions of the form:
344 * pxxx xmm1, xmm2/mem128
345 *
346 * Proper alignment of the 128-bit operand is enforced.
347 * SSE cpuid checks. No SIMD FP exceptions.
348 *
349 * @sa iemOpCommonSse2_FullFull_To_Full
350 */
351FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
352{
353 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
354 if (IEM_IS_MODRM_REG_MODE(bRm))
355 {
356 /*
357 * XMM, XMM.
358 */
359 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
361 IEM_MC_ARG(PRTUINT128U, pDst, 0);
362 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
363 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
364 IEM_MC_PREPARE_SSE_USAGE();
365 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
366 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
367 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc); /* SSE AIMPL call: worker also gets FXSAVE state (contrast the Opt variant). */
368 IEM_MC_ADVANCE_RIP_AND_FINISH();
369 IEM_MC_END();
370 }
371 else
372 {
373 /*
374 * XMM, [mem128].
375 */
376 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
377 IEM_MC_ARG(PRTUINT128U, pDst, 0);
378 IEM_MC_LOCAL(RTUINT128U, uSrc);
379 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
381
382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
384 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
385 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Enforces 128-bit alignment of the memory operand (see function docs). */
386
387 IEM_MC_PREPARE_SSE_USAGE();
388 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
389 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
390
391 IEM_MC_ADVANCE_RIP_AND_FINISH();
392 IEM_MC_END();
393 }
394}
395
396
397/**
398 * Common worker for SSE2 instructions on the forms:
399 * pxxx xmm1, xmm2/mem128
400 *
401 * Proper alignment of the 128-bit operand is enforced.
402 * Exceptions type 4. SSE2 cpuid checks.
403 *
404 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
405 */
406FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
407{
408 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
409 if (IEM_IS_MODRM_REG_MODE(bRm))
410 {
411 /*
412 * XMM, XMM.
413 */
414 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
416 IEM_MC_ARG(PRTUINT128U, pDst, 0);
417 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
418 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
419 IEM_MC_PREPARE_SSE_USAGE();
420 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
421 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
422 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc); /* SSE AIMPL call: worker also gets FXSAVE state (contrast the Opt variant). */
423 IEM_MC_ADVANCE_RIP_AND_FINISH();
424 IEM_MC_END();
425 }
426 else
427 {
428 /*
429 * XMM, [mem128].
430 */
431 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
432 IEM_MC_ARG(PRTUINT128U, pDst, 0);
433 IEM_MC_LOCAL(RTUINT128U, uSrc);
434 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
436
437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
439 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
440 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Enforces 128-bit alignment of the memory operand (see function docs). */
441
442 IEM_MC_PREPARE_SSE_USAGE();
443 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
444 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
445
446 IEM_MC_ADVANCE_RIP_AND_FINISH();
447 IEM_MC_END();
448 }
449}
450
451
452/**
453 * Common worker for SSE2 instructions on the forms:
454 * pxxx xmm1, xmm2/mem128
455 *
456 * Proper alignment of the 128-bit operand is enforced.
457 * Exceptions type 4. SSE2 cpuid checks.
458 *
459 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
460 * no FXSAVE state, just the operands.
461 *
462 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
463 */
464FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
465{
466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
467 if (IEM_IS_MODRM_REG_MODE(bRm))
468 {
469 /*
470 * XMM, XMM.
471 */
472 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
474 IEM_MC_ARG(PRTUINT128U, pDst, 0);
475 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
476 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
477 IEM_MC_PREPARE_SSE_USAGE();
478 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
479 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
480 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); /* Plain call: worker gets no FXSAVE state, just the two operands. */
481 IEM_MC_ADVANCE_RIP_AND_FINISH();
482 IEM_MC_END();
483 }
484 else
485 {
486 /*
487 * XMM, [mem128].
488 */
489 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
490 IEM_MC_ARG(PRTUINT128U, pDst, 0);
491 IEM_MC_LOCAL(RTUINT128U, uSrc);
492 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
493 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
494
495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
497 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
498 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Enforces 128-bit alignment of the memory operand (see function docs). */
499
500 IEM_MC_PREPARE_SSE_USAGE();
501 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
502 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
503
504 IEM_MC_ADVANCE_RIP_AND_FINISH();
505 IEM_MC_END();
506 }
507}
508
509
510/**
511 * Common worker for MMX instructions on the forms:
512 * pxxxx mm1, mm2/mem32
513 *
514 * The 2nd operand is the first half of a register, which in the memory case
515 * means a 32-bit memory access.
516 */
517FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
518{
519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
520 if (IEM_IS_MODRM_REG_MODE(bRm))
521 {
522 /*
523 * MMX, MMX.
524 */
525 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
527 IEM_MC_ARG(uint64_t *, puDst, 0);
528 IEM_MC_ARG(uint64_t const *, puSrc, 1);
529 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
530 IEM_MC_PREPARE_FPU_USAGE();
531 IEM_MC_FPU_TO_MMX_MODE(); /* Switch the FPU into MMX mode before any MM register is referenced. */
532
533 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
534 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
535 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
536 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
537
538 IEM_MC_ADVANCE_RIP_AND_FINISH();
539 IEM_MC_END();
540 }
541 else
542 {
543 /*
544 * MMX, [mem32].
545 */
546 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
547 IEM_MC_ARG(uint64_t *, puDst, 0);
548 IEM_MC_LOCAL(uint64_t, uSrc);
549 IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
551
552 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
554 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
555 IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* 32-bit memory operand zero-extended into the 64-bit local; only the low half is meaningful. */
556
557 IEM_MC_PREPARE_FPU_USAGE();
558 IEM_MC_FPU_TO_MMX_MODE();
559
560 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
561 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
562 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
563
564 IEM_MC_ADVANCE_RIP_AND_FINISH();
565 IEM_MC_END();
566 }
567}
568
569
570/**
571 * Common worker for SSE instructions on the forms:
572 * pxxxx xmm1, xmm2/mem128
573 *
574 * The 2nd operand is the first half of a register, which in the memory case
575 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
576 *
577 * Exceptions type 4.
578 */
579FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
580{
581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
582 if (IEM_IS_MODRM_REG_MODE(bRm))
583 {
584 /*
585 * XMM, XMM.
586 */
587 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
589 IEM_MC_ARG(PRTUINT128U, puDst, 0);
590 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
591 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
592 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* NOTE(review): sibling workers use IEM_MC_PREPARE_SSE_USAGE here — confirm the difference is intentional. */
593 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
594 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
595 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc); /* Plain call: worker gets no FXSAVE state, just the two operands. */
596 IEM_MC_ADVANCE_RIP_AND_FINISH();
597 IEM_MC_END();
598 }
599 else
600 {
601 /*
602 * XMM, [mem128].
603 */
604 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
605 IEM_MC_ARG(PRTUINT128U, puDst, 0);
606 IEM_MC_LOCAL(RTUINT128U, uSrc);
607 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
609
610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
612 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
613 /** @todo Most CPUs probably only read the low qword. We read everything to
614 * make sure we apply segmentation and alignment checks correctly.
615 * When we have time, it would be interesting to explore what real
616 * CPUs actually does and whether it will do a TLB load for the high
617 * part or skip any associated \#PF. Ditto for segmentation \#GPs. */
618 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
619
620 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
621 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
622 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
623
624 IEM_MC_ADVANCE_RIP_AND_FINISH();
625 IEM_MC_END();
626 }
627}
628
629
630/**
631 * Common worker for SSE2 instructions on the forms:
632 * pxxxx xmm1, xmm2/mem128
633 *
634 * The 2nd operand is the first half of a register, which in the memory case
635 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
636 *
637 * Exceptions type 4.
638 */
639FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
640{
641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
642 if (IEM_IS_MODRM_REG_MODE(bRm))
643 {
644 /*
645 * XMM, XMM.
646 */
647 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
649 IEM_MC_ARG(PRTUINT128U, puDst, 0);
650 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
651 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
652 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); /* NOTE(review): sibling workers use IEM_MC_PREPARE_SSE_USAGE here — confirm the difference is intentional. */
653 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
654 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
655 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc); /* Plain call: worker gets no FXSAVE state, just the two operands. */
656 IEM_MC_ADVANCE_RIP_AND_FINISH();
657 IEM_MC_END();
658 }
659 else
660 {
661 /*
662 * XMM, [mem128].
663 */
664 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
665 IEM_MC_ARG(PRTUINT128U, puDst, 0);
666 IEM_MC_LOCAL(RTUINT128U, uSrc);
667 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
669
670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
672 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
673 /** @todo Most CPUs probably only read the low qword. We read everything to
674 * make sure we apply segmentation and alignment checks correctly.
675 * When we have time, it would be interesting to explore what real
676 * CPUs actually does and whether it will do a TLB load for the high
677 * part or skip any associated \#PF. Ditto for segmentation \#GPs. */
678 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
679
680 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
681 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
682 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
683
684 IEM_MC_ADVANCE_RIP_AND_FINISH();
685 IEM_MC_END();
686 }
687}
688
689
690/**
691 * Common worker for MMX instructions on the form:
692 * pxxxx mm1, mm2/mem64
693 *
694 * The 2nd operand is the second half of a register, which in the memory case
695 * means a 64-bit memory access for MMX.
696 */
697FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
698{
699 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
700 if (IEM_IS_MODRM_REG_MODE(bRm))
701 {
702 /*
703 * MMX, MMX.
704 */
705 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
706 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
707 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
709 IEM_MC_ARG(uint64_t *, puDst, 0);
710 IEM_MC_ARG(uint64_t const *, puSrc, 1);
711 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
712 IEM_MC_PREPARE_FPU_USAGE();
713 IEM_MC_FPU_TO_MMX_MODE(); /* Switch the FPU into MMX mode before any MM register is referenced. */
714
715 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
716 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
717 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc); /* Plain call: worker gets no FXSAVE state, just the two operands. */
718 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
719
720 IEM_MC_ADVANCE_RIP_AND_FINISH();
721 IEM_MC_END();
722 }
723 else
724 {
725 /*
726 * MMX, [mem64].
727 */
728 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
729 IEM_MC_ARG(uint64_t *, puDst, 0);
730 IEM_MC_LOCAL(uint64_t, uSrc);
731 IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
733
734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
736 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
737 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */
738
739 IEM_MC_PREPARE_FPU_USAGE();
740 IEM_MC_FPU_TO_MMX_MODE();
741
742 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
743 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
744 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
745
746 IEM_MC_ADVANCE_RIP_AND_FINISH();
747 IEM_MC_END();
748 }
749}
750
751
752/**
753 * Common worker for SSE instructions on the form:
754 * pxxxx xmm1, xmm2/mem128
755 *
756 * The 2nd operand is the second half of a register, which for SSE a 128-bit
757 * aligned access where it may read the full 128 bits or only the upper 64 bits.
758 *
759 * Exceptions type 4.
760 */
761FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
762{
763 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
764 if (IEM_IS_MODRM_REG_MODE(bRm))
765 {
766 /*
767 * XMM, XMM.
768 */
769 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
771 IEM_MC_ARG(PRTUINT128U, puDst, 0);
772 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
773 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
774 IEM_MC_PREPARE_SSE_USAGE();
775 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
776 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
777 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc); /* Plain call: worker gets no FXSAVE state, just the two operands. */
778 IEM_MC_ADVANCE_RIP_AND_FINISH();
779 IEM_MC_END();
780 }
781 else
782 {
783 /*
784 * XMM, [mem128].
785 */
786 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
787 IEM_MC_ARG(PRTUINT128U, puDst, 0);
788 IEM_MC_LOCAL(RTUINT128U, uSrc);
789 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
790 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
791
792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
794 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
795 /** @todo Most CPUs probably only read the high qword. We read everything to
796 * make sure we apply segmentation and alignment checks correctly.
797 * When we have time, it would be interesting to explore what real
798 * CPUs actually does and whether it will do a TLB load for the lower
799 * part or skip any associated \#PF. */
800 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
801
802 IEM_MC_PREPARE_SSE_USAGE();
803 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
804 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
805
806 IEM_MC_ADVANCE_RIP_AND_FINISH();
807 IEM_MC_END();
808 }
809}
810
811
812/**
813 * Common worker for SSE instructions on the forms:
814 * pxxs xmm1, xmm2/mem128
815 *
816 * Proper alignment of the 128-bit operand is enforced.
817 * Exceptions type 2. SSE cpuid checks.
818 *
819 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
820 */
821FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
822{
823 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
824 if (IEM_IS_MODRM_REG_MODE(bRm))
825 {
826 /*
827 * XMM128, XMM128.
828 */
829 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
831 IEM_MC_LOCAL(IEMSSERESULT, SseRes); /* Aggregate output of the FP worker (presumably result value plus MXCSR flags — see IEMSSERESULT). */
832 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
833 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
834 IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
835 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
836 IEM_MC_PREPARE_SSE_USAGE();
837 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
838 IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
839 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
840 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
841 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); /* Any SIMD FP exception is raised only after the result has been stored. */
842
843 IEM_MC_ADVANCE_RIP_AND_FINISH();
844 IEM_MC_END();
845 }
846 else
847 {
848 /*
849 * XMM128, [mem128].
850 */
851 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
852 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
853 IEM_MC_LOCAL(X86XMMREG, uSrc2);
854 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
855 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
856 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
858
859 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
861 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
862 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Enforces 128-bit alignment of the memory operand (see function docs). */
863
864 IEM_MC_PREPARE_SSE_USAGE();
865 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
866 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
867 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
868 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
869
870 IEM_MC_ADVANCE_RIP_AND_FINISH();
871 IEM_MC_END();
872 }
873}
874
875
876/**
877 * Common worker for SSE instructions on the forms:
878 * pxxs xmm1, xmm2/mem32
879 *
880 * Proper alignment of the 128-bit operand is enforced.
881 * Exceptions type 2. SSE cpuid checks.
882 *
883 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
884 */
885FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
886{
887 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
888 if (IEM_IS_MODRM_REG_MODE(bRm))
889 {
890 /*
891 * XMM128, XMM32.
892 */
893 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
895 IEM_MC_LOCAL(IEMSSERESULT, SseRes); /* Aggregate output of the FP worker (presumably result value plus MXCSR flags — see IEMSSERESULT). */
896 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
897 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
898 IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2); /* Second operand is a single 32-bit float (scalar form). */
899 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
900 IEM_MC_PREPARE_SSE_USAGE();
901 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
902 IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
903 IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
904 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
905 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); /* Any SIMD FP exception is raised only after the result has been stored. */
906
907 IEM_MC_ADVANCE_RIP_AND_FINISH();
908 IEM_MC_END();
909 }
910 else
911 {
912 /*
913 * XMM128, [mem32].
914 */
915 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
916 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
917 IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
918 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
919 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
920 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
922
923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
925 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
926 IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* 32-bit scalar load; no 128-bit alignment check in this form. */
927
928 IEM_MC_PREPARE_SSE_USAGE();
929 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
930 IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
931 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
932 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
933
934 IEM_MC_ADVANCE_RIP_AND_FINISH();
935 IEM_MC_END();
936 }
937}
938
939
940/**
941 * Common worker for SSE2 instructions on the forms:
942 * pxxd xmm1, xmm2/mem128
943 *
944 * Proper alignment of the 128-bit operand is enforced.
945 * Exceptions type 2. SSE cpuid checks.
946 *
947 * @sa iemOpCommonSseFp_FullFull_To_Full
948 */
949FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
950{
951 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the (reg,reg) vs (reg,mem) form below. */
952 if (IEM_IS_MODRM_REG_MODE(bRm))
953 {
954 /*
955 * XMM128, XMM128.
956 */
957 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
959 IEM_MC_LOCAL(IEMSSERESULT, SseRes); /* Aggregate output of the FP worker (presumably result value plus MXCSR flags — see IEMSSERESULT). */
960 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
961 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
962 IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
963 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
964 IEM_MC_PREPARE_SSE_USAGE();
965 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
966 IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
967 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
968 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
969 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); /* Any SIMD FP exception is raised only after the result has been stored. */
970
971 IEM_MC_ADVANCE_RIP_AND_FINISH();
972 IEM_MC_END();
973 }
974 else
975 {
976 /*
977 * XMM128, [mem128].
978 */
979 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
980 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
981 IEM_MC_LOCAL(X86XMMREG, uSrc2);
982 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
983 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
984 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
986
987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* Note: effective address is decoded before the done-decoding helper in the memory form. */
988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
989 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
990 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Enforces 128-bit alignment of the memory operand (see function docs). */
991
992 IEM_MC_PREPARE_SSE_USAGE();
993 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
994 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
995 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
996 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
997
998 IEM_MC_ADVANCE_RIP_AND_FINISH();
999 IEM_MC_END();
1000 }
1001}
1002
1003
/**
 * Common worker for SSE2 instructions on the forms:
 *      xxxsd xmm1, xmm2/mem64
 *
 * The memory form fetches a plain 64-bit scalar (no 128-bit alignment check).
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * Calls @a pfnU128_R64 with a result buffer, the full destination register and
 * the low double of the source; result/MXCSR are committed via
 * IEM_MC_STORE_SSE_RESULT, after which a pending SIMD FP exception may be raised.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1066
1067
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1126
1127
/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * Identical structure to iemOpCommonSse2Fp_FullFull_To_Full except for the
 * SSE3 cpuid/decoding check.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1190
1191
/** Opcode 0x0f 0x00 /0. SLDT - store local descriptor table register. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operand size selects 16/32/64-bit store. */
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1215
1216
/** Opcode 0x0f 0x00 /1. STR - store task register. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();


    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operand size selects 16/32/64-bit store. */
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1241
1242
/** Opcode 0x0f 0x00 /2. LLDT - load local descriptor table register. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}
1272
1273
/** Opcode 0x0f 0x00 /3. LTR - load task register. */
FNIEMOP_DEF_1 is not used here; same structure as lldt but with plain
"no lock prefix" decoding in the register case. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}
1303
1304
1305/** Opcode 0x0f 0x00 /3. */
1306FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
1307{
1308 IEMOP_HLP_MIN_286();
1309 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1310
1311 if (IEM_IS_MODRM_REG_MODE(bRm))
1312 {
1313 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1314 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1315 IEM_MC_ARG(uint16_t, u16Sel, 0);
1316 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1317 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1318 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
1319 IEM_MC_END();
1320 }
1321 else
1322 {
1323 IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
1324 IEM_MC_ARG(uint16_t, u16Sel, 0);
1325 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1328 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1329 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1330 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
1331 IEM_MC_END();
1332 }
1333}
1334
1335
/** Opcode 0x0f 0x00 /4. VERR - verify segment for reading. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false); /* fWrite=false: check read access. */
}
1342
1343
/** Opcode 0x0f 0x00 /5. VERW - verify segment for writing. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true); /* fWrite=true: check write access. */
}
1350
1351
/**
 * Group 6 jump table, indexed by ModR/M.reg.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,        /* /0 */
    iemOp_Grp6_str,         /* /1 */
    iemOp_Grp6_lldt,        /* /2 */
    iemOp_Grp6_ltr,         /* /3 */
    iemOp_Grp6_verr,        /* /4 */
    iemOp_Grp6_verw,        /* /5 */
    iemOp_InvalidWithRM,    /* /6 */
    iemOp_InvalidWithRM     /* /7 */
};
1366
/** Opcode 0x0f 0x00. Dispatches group 6 on the ModR/M reg field. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
1373
1374
/** Opcode 0x0f 0x01 /0 (memory form). SGDT - store GDTR to memory. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
1390
1391
/** Opcode 0x0f 0x01 /0, mod=3, rm=1. VMCALL - VMX hypercall. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_vmcall);
}
1404
1405
/** Opcode 0x0f 0x01 /0, mod=3, rm=2. VMLAUNCH - enter VMX non-root mode. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    /* VM-entry switches the guest context, so treat it as a far indirect
       branch, invalidate mode/flags and end the translation block. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmlaunch);
}
#else
/* Without nested VMX support this is a stub raising #UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif
1425
1426
/** Opcode 0x0f 0x01 /0, mod=3, rm=3. VMRESUME - resume VMX non-root mode. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    /* VM-entry switches the guest context, so treat it as a far indirect
       branch, invalidate mode/flags and end the translation block. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmresume);
}
#else
/* Without nested VMX support this is a stub raising #UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif
1446
1447
/** Opcode 0x0f 0x01 /0, mod=3, rm=4. VMXOFF - leave VMX operation. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmxoff);
}
#else
/* Without nested VMX support this is a stub raising #UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif
1465
1466
/** Opcode 0x0f 0x01 /1 (memory form). SIDT - store IDTR to memory. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
1482
1483
/** Opcode 0x0f 0x01 /1, mod=3, rm=0. MONITOR - set up address monitoring. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
1491
1492
/** Opcode 0x0f 0x01 /1, mod=3, rm=1. MWAIT - wait on monitored address. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* May halt the vCPU, so the translation block has to end here. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_mwait);
}
1500
1501
/** Opcode 0x0f 0x01 /2 (memory form). LGDT - load GDTR from memory. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}
1517
1518
/** Opcode 0x0f 0x01 0xd0. XGETBV - read an extended control register; #UD
 *  unless the guest CPU profile has XSAVE/XRSTOR. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 * OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
1535
1536
/** Opcode 0x0f 0x01 0xd1. XSETBV - write an extended control register; #UD
 *  unless the guest CPU profile has XSAVE/XRSTOR. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 * OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
1553
1554
/** Opcode 0x0f 0x01 /3 (memory form). LIDT - load IDTR from memory.
 *  In 64-bit code the effective operand size is forced to 64-bit. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}
1570
1571
/** Opcode 0x0f 0x01 0xd8. VMRUN - run the nested SVM guest (#UD stub without
 *  nested SVM support). */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    /* World switch into the guest: far indirect branch semantics, invalidate
       mode/flags and end the translation block. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif
1585
/** Opcode 0x0f 0x01 0xd9. VMMCALL - SVM hypercall. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumtion
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmmcall);
}
1601
/** Opcode 0x0f 0x01 0xda. VMLOAD - load guest state from VMCB (#UD stub
 *  without nested SVM support). */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif
1613
1614
/** Opcode 0x0f 0x01 0xdb. VMSAVE - save guest state to VMCB (#UD stub
 *  without nested SVM support). */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif
1626
1627
/** Opcode 0x0f 0x01 0xdc. STGI - set global interrupt flag (#UD stub without
 *  nested SVM support). */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif
1639
1640
/** Opcode 0x0f 0x01 0xdd. CLGI - clear global interrupt flag (#UD stub
 *  without nested SVM support). */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif
1652
1653
/** Opcode 0x0f 0x01 0xdf. INVLPGA - invalidate TLB entry for guest ASID
 *  (#UD stub without nested SVM support). */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif
1665
1666
/** Opcode 0x0f 0x01 0xde. SKINIT - secure init and jump (#UD stub without
 *  nested SVM support). */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif
1678
1679
/** Opcode 0x0f 0x01 /4. SMSW - store machine status word (CR0 low bits). */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operand size selects 16/32/64-bit store. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1701
1702
/** Opcode 0x0f 0x01 /6. LMSW - load machine status word into CR0. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        /* No memory operand in the register form; pass NIL so the cImpl can
           tell the two forms apart (SVM decode assist needs this). */
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
}
1732
1733
/** Opcode 0x0f 0x01 /7. INVLPG - invalidate TLB entry for the given address. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    /* NOTE(review): decoder requires 486+ above but the MC flag says 386 -
       confirm whether IEM_MC_F_MIN_486 was intended here. */
    IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
}
1746
1747
/** Opcode 0x0f 0x01 0xf8. SWAPGS - swap GS base with MSR_KERNEL_GS_BASE;
 *  64-bit mode only. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_swapgs);
}
1756
1757
/** Opcode 0x0f 0x01 0xf9. RDTSCP - read time-stamp counter and processor ID. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtscp);
}
1765
1766
/**
 * Group 7 jump table, memory variant, indexed by ModR/M.reg.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,        /* /0 */
    iemOp_Grp7_sidt,        /* /1 */
    iemOp_Grp7_lgdt,        /* /2 */
    iemOp_Grp7_lidt,        /* /3 */
    iemOp_Grp7_smsw,        /* /4 */
    iemOp_InvalidWithRM,    /* /5 */
    iemOp_Grp7_lmsw,        /* /6 */
    iemOp_Grp7_invlpg       /* /7 */
};
1781
1782
/** Opcode 0x0f 0x01. Dispatches group 7: memory forms go via the table,
 *  register forms (mod=3) encode extra instructions in the rm field. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: /* VMX instructions (0xc1..0xc4). */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 1: /* MONITOR/MWAIT (0xc8/0xc9). */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 2: /* XGETBV/XSETBV (0xd0/0xd1). */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 3: /* AMD SVM instructions (0xd8..0xdf); all rm values defined. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* SMSW also has a register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            IEMOP_RAISE_INVALID_OPCODE_RET();

        case 6: /* LMSW also has a register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* SWAPGS/RDTSCP (0xf8/0xf9). */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            IEMOP_RAISE_INVALID_OPCODE_RET();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1852
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03), Gv,Ew.
 *  Fetches the 16-bit selector and defers to the cImpl which writes the
 *  access rights / limit into the destination GPR and updates EFLAGS.ZF. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                break;

            /* 32-bit and 64-bit share the 64-bit cImpl worker. */
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1, 0, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1939
1940
1941
/** Opcode 0x0f 0x02 - LAR Gv,Ew (load access rights). */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true); /* fIsLar = true */
}
1948
1949
/** Opcode 0x0f 0x03 - LSL Gv,Ew (load segment limit). */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false); /* fIsLar = false */
}
1956
1957
/** Opcode 0x0f 0x05 - SYSCALL. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred entirely to the C implementation: far indirect branch that may
       change CPU mode and RFLAGS, so the current translation block is ended. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
                                iemCImpl_syscall);
}
1967
1968
/** Opcode 0x0f 0x06 - CLTS (clear task-switched flag in CR0). */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* May cause a VM exit, hence the IEM_CIMPL_F_VMEXIT flag. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clts);
}
1976
1977
/** Opcode 0x0f 0x07 - SYSRET. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Far indirect branch, mode and RFLAGS changes; the effective operand
       size is passed on as it selects the return mode in long mode. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
                                iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
}
1987
1988
/** Opcode 0x0f 0x08 - INVD (invalidate caches without writeback). */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486(); /* INVD was introduced with the 486. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invd);
}
1997
1998
/** Opcode 0x0f 0x09 - WBINVD (write back and invalidate caches). */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486(); /* WBINVD was introduced with the 486. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wbinvd);
}
2007
2008
/** Opcode 0x0f 0x0b - UD2: architecturally defined invalid opcode (\#UD). */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2015
/** Opcode 0x0f 0x0d - AMD prefetch group (Grp P) / NOP Ev. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* Register operands are invalid for this group; only memory forms decode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* Only the mnemonic differs per /r encoding; all are treated the same. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address for fault semantics, then do nothing. */
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2055
2056
/** Opcode 0x0f 0x0e - FEMMS (AMD 3DNow! fast exit from MMX state). */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Switches the FPU back out of MMX mode (same state change as EMMS). */
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2071
2072
/** Opcode 0x0f 0x0f - 3DNow! escape; the real opcode is a trailing imm8. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    /* Invalid unless the guest CPU profile advertises 3DNow!. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
2091
2092
2093/**
2094 * @opcode 0x10
2095 * @oppfx none
2096 * @opcpuid sse
2097 * @opgroup og_sse_simdfp_datamove
2098 * @opxcpttype 4UA
2099 * @optest op1=1 op2=2 -> op1=2
2100 * @optest op1=0 op2=-22 -> op1=-22
2101 */
2102FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2103{
2104 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2106 if (IEM_IS_MODRM_REG_MODE(bRm))
2107 {
2108 /*
2109 * XMM128, XMM128.
2110 */
2111 IEM_MC_BEGIN(0, 0, 0, 0);
2112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2113 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2114 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2115 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2116 IEM_GET_MODRM_RM(pVCpu, bRm));
2117 IEM_MC_ADVANCE_RIP_AND_FINISH();
2118 IEM_MC_END();
2119 }
2120 else
2121 {
2122 /*
2123 * XMM128, [mem128].
2124 */
2125 IEM_MC_BEGIN(0, 2, 0, 0);
2126 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2127 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2128
2129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2131 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2132 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2133
2134 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2135 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2136
2137 IEM_MC_ADVANCE_RIP_AND_FINISH();
2138 IEM_MC_END();
2139 }
2140
2141}
2142
2143
2144/**
2145 * @opcode 0x10
2146 * @oppfx 0x66
2147 * @opcpuid sse2
2148 * @opgroup og_sse2_pcksclr_datamove
2149 * @opxcpttype 4UA
2150 * @optest op1=1 op2=2 -> op1=2
2151 * @optest op1=0 op2=-42 -> op1=-42
2152 */
2153FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2154{
2155 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2157 if (IEM_IS_MODRM_REG_MODE(bRm))
2158 {
2159 /*
2160 * XMM128, XMM128.
2161 */
2162 IEM_MC_BEGIN(0, 0, 0, 0);
2163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2164 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2165 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2166 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2167 IEM_GET_MODRM_RM(pVCpu, bRm));
2168 IEM_MC_ADVANCE_RIP_AND_FINISH();
2169 IEM_MC_END();
2170 }
2171 else
2172 {
2173 /*
2174 * XMM128, [mem128].
2175 */
2176 IEM_MC_BEGIN(0, 2, 0, 0);
2177 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2179
2180 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2182 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2183 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2184
2185 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2186 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2187
2188 IEM_MC_ADVANCE_RIP_AND_FINISH();
2189 IEM_MC_END();
2190 }
2191}
2192
2193
2194/**
2195 * @opcode 0x10
2196 * @oppfx 0xf3
2197 * @opcpuid sse
2198 * @opgroup og_sse_simdfp_datamove
2199 * @opxcpttype 5
2200 * @optest op1=1 op2=2 -> op1=2
2201 * @optest op1=0 op2=-22 -> op1=-22
2202 */
2203FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2204{
2205 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2207 if (IEM_IS_MODRM_REG_MODE(bRm))
2208 {
2209 /*
2210 * XMM32, XMM32.
2211 */
2212 IEM_MC_BEGIN(0, 1, 0, 0);
2213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2214 IEM_MC_LOCAL(uint32_t, uSrc);
2215
2216 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2217 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2218 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
2219 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2220
2221 IEM_MC_ADVANCE_RIP_AND_FINISH();
2222 IEM_MC_END();
2223 }
2224 else
2225 {
2226 /*
2227 * XMM128, [mem32].
2228 */
2229 IEM_MC_BEGIN(0, 2, 0, 0);
2230 IEM_MC_LOCAL(uint32_t, uSrc);
2231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2232
2233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2235 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2236 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2237
2238 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2239 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2240
2241 IEM_MC_ADVANCE_RIP_AND_FINISH();
2242 IEM_MC_END();
2243 }
2244}
2245
2246
2247/**
2248 * @opcode 0x10
2249 * @oppfx 0xf2
2250 * @opcpuid sse2
2251 * @opgroup og_sse2_pcksclr_datamove
2252 * @opxcpttype 5
2253 * @optest op1=1 op2=2 -> op1=2
2254 * @optest op1=0 op2=-42 -> op1=-42
2255 */
2256FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2257{
2258 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2259 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2260 if (IEM_IS_MODRM_REG_MODE(bRm))
2261 {
2262 /*
2263 * XMM64, XMM64.
2264 */
2265 IEM_MC_BEGIN(0, 1, 0, 0);
2266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2267 IEM_MC_LOCAL(uint64_t, uSrc);
2268
2269 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2270 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2271 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2272 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2273
2274 IEM_MC_ADVANCE_RIP_AND_FINISH();
2275 IEM_MC_END();
2276 }
2277 else
2278 {
2279 /*
2280 * XMM128, [mem64].
2281 */
2282 IEM_MC_BEGIN(0, 2, 0, 0);
2283 IEM_MC_LOCAL(uint64_t, uSrc);
2284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2285
2286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2288 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2289 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2290
2291 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2292 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2293
2294 IEM_MC_ADVANCE_RIP_AND_FINISH();
2295 IEM_MC_END();
2296 }
2297}
2298
2299
2300/**
2301 * @opcode 0x11
2302 * @oppfx none
2303 * @opcpuid sse
2304 * @opgroup og_sse_simdfp_datamove
2305 * @opxcpttype 4UA
2306 * @optest op1=1 op2=2 -> op1=2
2307 * @optest op1=0 op2=-42 -> op1=-42
2308 */
2309FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2310{
2311 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2312 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2313 if (IEM_IS_MODRM_REG_MODE(bRm))
2314 {
2315 /*
2316 * XMM128, XMM128.
2317 */
2318 IEM_MC_BEGIN(0, 0, 0, 0);
2319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2320 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2321 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2322 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2323 IEM_GET_MODRM_REG(pVCpu, bRm));
2324 IEM_MC_ADVANCE_RIP_AND_FINISH();
2325 IEM_MC_END();
2326 }
2327 else
2328 {
2329 /*
2330 * [mem128], XMM128.
2331 */
2332 IEM_MC_BEGIN(0, 2, 0, 0);
2333 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2334 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2335
2336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2338 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2339 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2340
2341 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2342 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2343
2344 IEM_MC_ADVANCE_RIP_AND_FINISH();
2345 IEM_MC_END();
2346 }
2347}
2348
2349
2350/**
2351 * @opcode 0x11
2352 * @oppfx 0x66
2353 * @opcpuid sse2
2354 * @opgroup og_sse2_pcksclr_datamove
2355 * @opxcpttype 4UA
2356 * @optest op1=1 op2=2 -> op1=2
2357 * @optest op1=0 op2=-42 -> op1=-42
2358 */
2359FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2360{
2361 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2363 if (IEM_IS_MODRM_REG_MODE(bRm))
2364 {
2365 /*
2366 * XMM128, XMM128.
2367 */
2368 IEM_MC_BEGIN(0, 0, 0, 0);
2369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2370 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2371 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2372 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2373 IEM_GET_MODRM_REG(pVCpu, bRm));
2374 IEM_MC_ADVANCE_RIP_AND_FINISH();
2375 IEM_MC_END();
2376 }
2377 else
2378 {
2379 /*
2380 * [mem128], XMM128.
2381 */
2382 IEM_MC_BEGIN(0, 2, 0, 0);
2383 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2385
2386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2388 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2389 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2390
2391 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2392 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2393
2394 IEM_MC_ADVANCE_RIP_AND_FINISH();
2395 IEM_MC_END();
2396 }
2397}
2398
2399
2400/**
2401 * @opcode 0x11
2402 * @oppfx 0xf3
2403 * @opcpuid sse
2404 * @opgroup og_sse_simdfp_datamove
2405 * @opxcpttype 5
2406 * @optest op1=1 op2=2 -> op1=2
2407 * @optest op1=0 op2=-22 -> op1=-22
2408 */
2409FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2410{
2411 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2413 if (IEM_IS_MODRM_REG_MODE(bRm))
2414 {
2415 /*
2416 * XMM32, XMM32.
2417 */
2418 IEM_MC_BEGIN(0, 1, 0, 0);
2419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2420 IEM_MC_LOCAL(uint32_t, uSrc);
2421
2422 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2423 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2424 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2425 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2426
2427 IEM_MC_ADVANCE_RIP_AND_FINISH();
2428 IEM_MC_END();
2429 }
2430 else
2431 {
2432 /*
2433 * [mem32], XMM32.
2434 */
2435 IEM_MC_BEGIN(0, 2, 0, 0);
2436 IEM_MC_LOCAL(uint32_t, uSrc);
2437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2438
2439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2441 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2442 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2443
2444 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2445 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2446
2447 IEM_MC_ADVANCE_RIP_AND_FINISH();
2448 IEM_MC_END();
2449 }
2450}
2451
2452
2453/**
2454 * @opcode 0x11
2455 * @oppfx 0xf2
2456 * @opcpuid sse2
2457 * @opgroup og_sse2_pcksclr_datamove
2458 * @opxcpttype 5
2459 * @optest op1=1 op2=2 -> op1=2
2460 * @optest op1=0 op2=-42 -> op1=-42
2461 */
2462FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2463{
2464 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2466 if (IEM_IS_MODRM_REG_MODE(bRm))
2467 {
2468 /*
2469 * XMM64, XMM64.
2470 */
2471 IEM_MC_BEGIN(0, 1, 0, 0);
2472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2473 IEM_MC_LOCAL(uint64_t, uSrc);
2474
2475 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2476 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2477 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2478 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2479
2480 IEM_MC_ADVANCE_RIP_AND_FINISH();
2481 IEM_MC_END();
2482 }
2483 else
2484 {
2485 /*
2486 * [mem64], XMM64.
2487 */
2488 IEM_MC_BEGIN(0, 2, 0, 0);
2489 IEM_MC_LOCAL(uint64_t, uSrc);
2490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2491
2492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2494 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2495 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2496
2497 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2498 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2499
2500 IEM_MC_ADVANCE_RIP_AND_FINISH();
2501 IEM_MC_END();
2502 }
2503}
2504
2505
/** Opcode 0x0f 0x12: MOVHLPS (reg form) / MOVLPS (mem form). */
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* MOVHLPS: high qword of the source -> low qword of the destination;
           the destination's high qword is preserved. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* MOVLPS: load qword from memory into the low half of the register;
           the high qword is preserved. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2567
2568
2569/**
2570 * @opcode 0x12
2571 * @opcodesub !11 mr/reg
2572 * @oppfx 0x66
2573 * @opcpuid sse2
2574 * @opgroup og_sse2_pcksclr_datamove
2575 * @opxcpttype 5
2576 * @optest op1=1 op2=2 -> op1=2
2577 * @optest op1=0 op2=-42 -> op1=-42
2578 */
2579FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2580{
2581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2582 if (IEM_IS_MODRM_MEM_MODE(bRm))
2583 {
2584 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2585
2586 IEM_MC_BEGIN(0, 2, 0, 0);
2587 IEM_MC_LOCAL(uint64_t, uSrc);
2588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2589
2590 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2592 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2593 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2594
2595 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2596 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2597
2598 IEM_MC_ADVANCE_RIP_AND_FINISH();
2599 IEM_MC_END();
2600 }
2601
2602 /**
2603 * @opdone
2604 * @opmnemonic ud660f12m3
2605 * @opcode 0x12
2606 * @opcodesub 11 mr/reg
2607 * @oppfx 0x66
2608 * @opunused immediate
2609 * @opcpuid sse
2610 * @optest ->
2611 */
2612 else
2613 IEMOP_RAISE_INVALID_OPCODE_RET();
2614}
2615
2616
2617/**
2618 * @opcode 0x12
2619 * @oppfx 0xf3
2620 * @opcpuid sse3
2621 * @opgroup og_sse3_pcksclr_datamove
2622 * @opxcpttype 4
2623 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2624 * op1=0x00000002000000020000000100000001
2625 */
2626FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2627{
2628 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2630 if (IEM_IS_MODRM_REG_MODE(bRm))
2631 {
2632 /*
2633 * XMM, XMM.
2634 */
2635 IEM_MC_BEGIN(0, 1, 0, 0);
2636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2637 IEM_MC_LOCAL(RTUINT128U, uSrc);
2638
2639 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2640 IEM_MC_PREPARE_SSE_USAGE();
2641
2642 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2643 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2644 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2645 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2646 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2647
2648 IEM_MC_ADVANCE_RIP_AND_FINISH();
2649 IEM_MC_END();
2650 }
2651 else
2652 {
2653 /*
2654 * XMM, [mem128].
2655 */
2656 IEM_MC_BEGIN(0, 2, 0, 0);
2657 IEM_MC_LOCAL(RTUINT128U, uSrc);
2658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2659
2660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2662 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2663 IEM_MC_PREPARE_SSE_USAGE();
2664
2665 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2666 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2667 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2668 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2669 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2670
2671 IEM_MC_ADVANCE_RIP_AND_FINISH();
2672 IEM_MC_END();
2673 }
2674}
2675
2676
2677/**
2678 * @opcode 0x12
2679 * @oppfx 0xf2
2680 * @opcpuid sse3
2681 * @opgroup og_sse3_pcksclr_datamove
2682 * @opxcpttype 5
2683 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2684 * op1=0x22222222111111112222222211111111
2685 */
2686FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2687{
2688 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2690 if (IEM_IS_MODRM_REG_MODE(bRm))
2691 {
2692 /*
2693 * XMM128, XMM64.
2694 */
2695 IEM_MC_BEGIN(1, 0, 0, 0);
2696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2697 IEM_MC_ARG(uint64_t, uSrc, 0);
2698
2699 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2700 IEM_MC_PREPARE_SSE_USAGE();
2701
2702 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2703 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2704 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2705
2706 IEM_MC_ADVANCE_RIP_AND_FINISH();
2707 IEM_MC_END();
2708 }
2709 else
2710 {
2711 /*
2712 * XMM128, [mem64].
2713 */
2714 IEM_MC_BEGIN(1, 1, 0, 0);
2715 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2716 IEM_MC_ARG(uint64_t, uSrc, 0);
2717
2718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2720 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2721 IEM_MC_PREPARE_SSE_USAGE();
2722
2723 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2724 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2725 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2726
2727 IEM_MC_ADVANCE_RIP_AND_FINISH();
2728 IEM_MC_END();
2729 }
2730}
2731
2732
2733/**
2734 * @opcode 0x13
2735 * @opcodesub !11 mr/reg
2736 * @oppfx none
2737 * @opcpuid sse
2738 * @opgroup og_sse_simdfp_datamove
2739 * @opxcpttype 5
2740 * @optest op1=1 op2=2 -> op1=2
2741 * @optest op1=0 op2=-42 -> op1=-42
2742 */
2743FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2744{
2745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2746 if (IEM_IS_MODRM_MEM_MODE(bRm))
2747 {
2748 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2749
2750 IEM_MC_BEGIN(0, 2, 0, 0);
2751 IEM_MC_LOCAL(uint64_t, uSrc);
2752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2753
2754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2756 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2757 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2758
2759 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2760 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2761
2762 IEM_MC_ADVANCE_RIP_AND_FINISH();
2763 IEM_MC_END();
2764 }
2765
2766 /**
2767 * @opdone
2768 * @opmnemonic ud0f13m3
2769 * @opcode 0x13
2770 * @opcodesub 11 mr/reg
2771 * @oppfx none
2772 * @opunused immediate
2773 * @opcpuid sse
2774 * @optest ->
2775 */
2776 else
2777 IEMOP_RAISE_INVALID_OPCODE_RET();
2778}
2779
2780
2781/**
2782 * @opcode 0x13
2783 * @opcodesub !11 mr/reg
2784 * @oppfx 0x66
2785 * @opcpuid sse2
2786 * @opgroup og_sse2_pcksclr_datamove
2787 * @opxcpttype 5
2788 * @optest op1=1 op2=2 -> op1=2
2789 * @optest op1=0 op2=-42 -> op1=-42
2790 */
2791FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2792{
2793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2794 if (IEM_IS_MODRM_MEM_MODE(bRm))
2795 {
2796 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2797
2798 IEM_MC_BEGIN(0, 2, 0, 0);
2799 IEM_MC_LOCAL(uint64_t, uSrc);
2800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2801
2802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2804 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2805 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2806
2807 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2808 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2809
2810 IEM_MC_ADVANCE_RIP_AND_FINISH();
2811 IEM_MC_END();
2812 }
2813
2814 /**
2815 * @opdone
2816 * @opmnemonic ud660f13m3
2817 * @opcode 0x13
2818 * @opcodesub 11 mr/reg
2819 * @oppfx 0x66
2820 * @opunused immediate
2821 * @opcpuid sse
2822 * @optest ->
2823 */
2824 else
2825 IEMOP_RAISE_INVALID_OPCODE_RET();
2826}
2827
2828
2829/**
2830 * @opmnemonic udf30f13
2831 * @opcode 0x13
2832 * @oppfx 0xf3
2833 * @opunused intel-modrm
2834 * @opcpuid sse
2835 * @optest ->
2836 * @opdone
2837 */
2838
2839/**
2840 * @opmnemonic udf20f13
2841 * @opcode 0x13
2842 * @oppfx 0xf2
2843 * @opunused intel-modrm
2844 * @opcpuid sse
2845 * @optest ->
2846 * @opdone
2847 */
2848
/** Opcode 0x0f 0x14 - unpcklps Vx, Wx (interleave low single-precision). */
FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Shared low-low SSE worker handles decode, exceptions and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
}
2855
2856
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx (interleave low double-precision). */
FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Shared low-low SSE2 worker handles decode, exceptions and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
}
2863
2864
2865/**
2866 * @opdone
2867 * @opmnemonic udf30f14
2868 * @opcode 0x14
2869 * @oppfx 0xf3
2870 * @opunused intel-modrm
2871 * @opcpuid sse
2872 * @optest ->
2873 * @opdone
2874 */
2875
2876/**
2877 * @opmnemonic udf20f14
2878 * @opcode 0x14
2879 * @oppfx 0xf2
2880 * @opunused intel-modrm
2881 * @opcpuid sse
2882 * @optest ->
2883 * @opdone
2884 */
2885
/** Opcode 0x0f 0x15 - unpckhps Vx, Wx (interleave high single-precision). */
FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Shared high-high SSE worker handles decode, exceptions and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
}
2892
2893
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx (interleave high double-precision). */
FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Shared high-high SSE2 worker handles decode, exceptions and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
}
2900
2901
2902/* Opcode 0xf3 0x0f 0x15 - invalid */
2903/* Opcode 0xf2 0x0f 0x15 - invalid */
2904
2905/**
2906 * @opdone
2907 * @opmnemonic udf30f15
2908 * @opcode 0x15
2909 * @oppfx 0xf3
2910 * @opunused intel-modrm
2911 * @opcpuid sse
2912 * @optest ->
2913 * @opdone
2914 */
2915
2916/**
2917 * @opmnemonic udf20f15
2918 * @opcode 0x15
2919 * @oppfx 0xf2
2920 * @opunused intel-modrm
2921 * @opcpuid sse
2922 * @optest ->
2923 * @opdone
2924 */
2925
/** Opcode 0x0f 0x16: MOVLHPS (reg form) / MOVHPS (mem form). */
FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* MOVLHPS: low qword of the source -> high qword of the destination;
           the destination's low qword is preserved. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* MOVHPS: load qword from memory into the high half of the register;
           the low qword is preserved. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2987
2988
2989/**
2990 * @opcode 0x16
2991 * @opcodesub !11 mr/reg
2992 * @oppfx 0x66
2993 * @opcpuid sse2
2994 * @opgroup og_sse2_pcksclr_datamove
2995 * @opxcpttype 5
2996 * @optest op1=1 op2=2 -> op1=2
2997 * @optest op1=0 op2=-42 -> op1=-42
2998 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    /* MOVHPD xmm, m64 (66 0F 16): load 64 bits from memory into the high
       quadword of the destination XMM register; low quadword preserved.
       The register form (mod=11) is undefined and raises #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3035
3036
3037/**
3038 * @opcode 0x16
3039 * @oppfx 0xf3
3040 * @opcpuid sse3
3041 * @opgroup og_sse3_pcksclr_datamove
3042 * @opxcpttype 4
3043 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3044 * op1=0x00000002000000020000000100000001
3045 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    /* MOVSHDUP (F3 0F 16): duplicate the odd-indexed dwords of the source
       into both halves of each dword pair, i.e.
       dst[0]=dst[1]=src[1], dst[2]=dst[3]=src[3]. */
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Fetch first: source and destination may be the same register. */
        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        /* Memory operand must be 16-byte aligned (raises #GP otherwise). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
        IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3095
3096/**
3097 * @opdone
3098 * @opmnemonic udf30f16
3099 * @opcode 0x16
3100 * @oppfx 0xf2
3101 * @opunused intel-modrm
3102 * @opcpuid sse
3103 * @optest ->
3104 * @opdone
3105 */
3106
3107
3108/**
3109 * @opcode 0x17
3110 * @opcodesub !11 mr/reg
3111 * @oppfx none
3112 * @opcpuid sse
3113 * @opgroup og_sse_simdfp_datamove
3114 * @opxcpttype 5
3115 * @optest op1=1 op2=2 -> op1=2
3116 * @optest op1=0 op2=-42 -> op1=-42
3117 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    /* MOVHPS m64, xmm (0F 17): store the high quadword of the source XMM
       register to memory.  The register form (mod=11) raises #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* read-only access to the XMM register file */

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud0f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3154
3155
3156/**
3157 * @opcode 0x17
3158 * @opcodesub !11 mr/reg
3159 * @oppfx 0x66
3160 * @opcpuid sse2
3161 * @opgroup og_sse2_pcksclr_datamove
3162 * @opxcpttype 5
3163 * @optest op1=1 op2=2 -> op1=2
3164 * @optest op1=0 op2=-42 -> op1=-42
3165 */
3166FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3167{
3168 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3169 if (IEM_IS_MODRM_MEM_MODE(bRm))
3170 {
3171 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3172
3173 IEM_MC_BEGIN(0, 2, 0, 0);
3174 IEM_MC_LOCAL(uint64_t, uSrc);
3175 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3176
3177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3179 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3180 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3181
3182 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3183 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3184
3185 IEM_MC_ADVANCE_RIP_AND_FINISH();
3186 IEM_MC_END();
3187 }
3188
3189 /**
3190 * @opdone
3191 * @opmnemonic ud660f17m3
3192 * @opcode 0x17
3193 * @opcodesub 11 mr/reg
3194 * @oppfx 0x66
3195 * @opunused immediate
3196 * @opcpuid sse
3197 * @optest ->
3198 */
3199 else
3200 IEMOP_RAISE_INVALID_OPCODE_RET();
3201}
3202
3203
3204/**
3205 * @opdone
3206 * @opmnemonic udf30f17
3207 * @opcode 0x17
3208 * @oppfx 0xf3
3209 * @opunused intel-modrm
3210 * @opcpuid sse
3211 * @optest ->
3212 * @opdone
3213 */
3214
3215/**
3216 * @opmnemonic udf20f17
3217 * @opcode 0x17
3218 * @oppfx 0xf2
3219 * @opunused intel-modrm
3220 * @opcpuid sse
3221 * @optest ->
3222 * @opdone
3223 */
3224
3225
3226/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    /* 0F 18 group 16: PREFETCHh hints.  Emulated as a NOP after decoding
       the effective address (prefetching has no architectural effect). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* /reg is 3 bits, all values covered above */
        }

        IEM_MC_BEGIN(0, 1, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3257
3258
3259/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    /* 0F 19..1F: multi-byte NOP.  The ModR/M byte (and any effective
       address) is still decoded so instruction length is correct, but
       nothing is executed. */
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3283
3284
3285/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* 0F 20: MOV r32/r64, CRn - read a control register into a GPR. */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (IEM_IS_64BIT_CODE(pVCpu))
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    IEMOP_HLP_DONE_DECODING();

    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
}
3316
3317
3318/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    /* 0F 21: MOV r32/r64, DRn - read a debug register into a GPR. */
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would address DR8..DR15, which do not exist -> #UD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
}
3329
3330
3331/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* 0F 22: MOV CRn, r32/r64 - load a control register from a GPR. */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (IEM_IS_64BIT_CODE(pVCpu))
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    IEMOP_HLP_DONE_DECODING();

    /* Writing CR2 or CR8 cannot change the processor mode, so only those
       skip the IEM_CIMPL_F_MODE flag; CR0/CR3/CR4 loads may switch mode. */
    if (iCrReg & (2 | 8))
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                    iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
    else
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT,
                                    iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
}
3367
3368
3369/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /* 0F 23: MOV DRn, r32/r64 - load a debug register from a GPR. */
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would address DR8..DR15, which do not exist -> #UD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
}
3381
3382
3383/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* 0F 24: MOV r32, TRn - read a test register (386/486 only). */
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Test registers were dropped with the Pentium; #UD on newer targets. */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
}
3394
3395
3396/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* 0F 26: MOV TRn, r32 - write a test register (386/486 only). */
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Test registers were dropped with the Pentium; #UD on newer targets. */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
}
3407
3408
3409/**
3410 * @opcode 0x28
3411 * @oppfx none
3412 * @opcpuid sse
3413 * @opgroup og_sse_simdfp_datamove
3414 * @opxcpttype 1
3415 * @optest op1=1 op2=2 -> op1=2
3416 * @optest op1=0 op2=-42 -> op1=-42
3417 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    /* MOVAPS xmm, xmm/m128 (0F 28): aligned 128-bit load/move. */
    IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* "A" in MOVAPS: the memory operand must be 16-byte aligned. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3457
3458/**
3459 * @opcode 0x28
3460 * @oppfx 66
3461 * @opcpuid sse2
3462 * @opgroup og_sse2_pcksclr_datamove
3463 * @opxcpttype 1
3464 * @optest op1=1 op2=2 -> op1=2
3465 * @optest op1=0 op2=-42 -> op1=-42
3466 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    /* MOVAPD xmm, xmm/m128 (66 0F 28): aligned 128-bit load/move (SSE2). */
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* "A" in MOVAPD: the memory operand must be 16-byte aligned. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3506
3507/* Opcode 0xf3 0x0f 0x28 - invalid */
3508/* Opcode 0xf2 0x0f 0x28 - invalid */
3509
3510/**
3511 * @opcode 0x29
3512 * @oppfx none
3513 * @opcpuid sse
3514 * @opgroup og_sse_simdfp_datamove
3515 * @opxcpttype 1
3516 * @optest op1=1 op2=2 -> op1=2
3517 * @optest op1=0 op2=-42 -> op1=-42
3518 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    /* MOVAPS xmm/m128, xmm (0F 29): aligned 128-bit store/move. */
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Note: MR form - rm is the destination, reg the source. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* register file is only read here */

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* "A" in MOVAPS: the memory operand must be 16-byte aligned. */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3558
3559/**
3560 * @opcode 0x29
3561 * @oppfx 66
3562 * @opcpuid sse2
3563 * @opgroup og_sse2_pcksclr_datamove
3564 * @opxcpttype 1
3565 * @optest op1=1 op2=2 -> op1=2
3566 * @optest op1=0 op2=-42 -> op1=-42
3567 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    /* MOVAPD xmm/m128, xmm (66 0F 29): aligned 128-bit store/move (SSE2). */
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Note: MR form - rm is the destination, reg the source. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* register file is only read here */

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* "A" in MOVAPD: the memory operand must be 16-byte aligned. */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3607
3608/* Opcode 0xf3 0x0f 0x29 - invalid */
3609/* Opcode 0xf2 0x0f 0x29 - invalid */
3610
3611
3612/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3613FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3614{
3615 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3616 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3617 if (IEM_IS_MODRM_REG_MODE(bRm))
3618 {
3619 /*
3620 * XMM, MMX
3621 */
3622 IEM_MC_BEGIN(3, 1, 0, 0);
3623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3624 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3625 IEM_MC_LOCAL(X86XMMREG, Dst);
3626 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3627 IEM_MC_ARG(uint64_t, u64Src, 2);
3628 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3629 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3630 IEM_MC_PREPARE_FPU_USAGE();
3631 IEM_MC_FPU_TO_MMX_MODE();
3632
3633 IEM_MC_REF_MXCSR(pfMxcsr);
3634 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3635 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3636
3637 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3638 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3639 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3640 } IEM_MC_ELSE() {
3641 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3642 } IEM_MC_ENDIF();
3643
3644 IEM_MC_ADVANCE_RIP_AND_FINISH();
3645 IEM_MC_END();
3646 }
3647 else
3648 {
3649 /*
3650 * XMM, [mem64]
3651 */
3652 IEM_MC_BEGIN(3, 2, 0, 0);
3653 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3654 IEM_MC_LOCAL(X86XMMREG, Dst);
3655 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3656 IEM_MC_ARG(uint64_t, u64Src, 2);
3657 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3658
3659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3661 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3662 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3663 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3664
3665 IEM_MC_PREPARE_FPU_USAGE();
3666 IEM_MC_FPU_TO_MMX_MODE();
3667 IEM_MC_REF_MXCSR(pfMxcsr);
3668
3669 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3670 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3671 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3672 } IEM_MC_ELSE() {
3673 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3674 } IEM_MC_ENDIF();
3675
3676 IEM_MC_ADVANCE_RIP_AND_FINISH();
3677 IEM_MC_END();
3678 }
3679}
3680
3681
3682/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
{
    /* CVTPI2PD xmm, mm/m64: convert two packed int32 to two packed doubles.
       Presumably the AIMPL writes the entire 128-bit destination (two f64
       lanes), which would explain why - unlike cvtpi2ps - Dst is not
       pre-fetched here; TODO confirm against iemAImpl_cvtpi2pd_u128. */
    IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, MMX
         */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE(); /* MMX register source puts the FPU into MMX mode. */

        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64]
         */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG(uint64_t, u64Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        /* Doesn't cause a transition to MMX mode. */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3749
3750
3751/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
{
    /* CVTSI2SS xmm, r/m32 or r/m64 (F3 0F 2A): convert a signed integer to
       scalar single-precision float in the destination's low dword; REX.W
       selects the 64-bit integer source. */
    IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG(const int64_t *, pi64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Result is only committed when no unmasked SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_LOCAL(int64_t, i64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(3, 2, 0, 0);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG(const int32_t *, pi32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(3, 4, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
            IEM_MC_LOCAL(int32_t, i32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
3874
3875
3876/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
{
    /* CVTSI2SD xmm, r/m32 or r/m64 (F2 0F 2A): convert a signed integer to
       scalar double-precision float in the destination's low qword; REX.W
       selects the 64-bit integer source. */
    IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG(const int64_t *, pi64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Result is only committed when no unmasked SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_LOCAL(int64_t, i64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(3, 2, 0, 0);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG(const int32_t *, pi32Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(3, 4, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
            IEM_MC_LOCAL(int32_t, i32Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
3999
4000
4001/**
4002 * @opcode 0x2b
4003 * @opcodesub !11 mr/reg
4004 * @oppfx none
4005 * @opcpuid sse
4006 * @opgroup og_sse1_cachect
4007 * @opxcpttype 1
4008 * @optest op1=1 op2=2 -> op1=2
4009 * @optest op1=0 op2=-42 -> op1=-42
4010 */
4011FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4012{
4013 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4015 if (IEM_IS_MODRM_MEM_MODE(bRm))
4016 {
4017 /*
4018 * memory, register.
4019 */
4020 IEM_MC_BEGIN(0, 2, 0, 0);
4021 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4022 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4023
4024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4026 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4027 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4028
4029 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4030 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4031
4032 IEM_MC_ADVANCE_RIP_AND_FINISH();
4033 IEM_MC_END();
4034 }
4035 /* The register, register encoding is invalid. */
4036 else
4037 IEMOP_RAISE_INVALID_OPCODE_RET();
4038}
4039
4040/**
4041 * @opcode 0x2b
4042 * @opcodesub !11 mr/reg
4043 * @oppfx 0x66
4044 * @opcpuid sse2
4045 * @opgroup og_sse2_cachect
4046 * @opxcpttype 1
4047 * @optest op1=1 op2=2 -> op1=2
4048 * @optest op1=0 op2=-42 -> op1=-42
4049 */
4050FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4051{
4052 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4053 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4054 if (IEM_IS_MODRM_MEM_MODE(bRm))
4055 {
4056 /*
4057 * memory, register.
4058 */
4059 IEM_MC_BEGIN(0, 2, 0, 0);
4060 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4062
4063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4065 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4066 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4067
4068 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4069 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4070
4071 IEM_MC_ADVANCE_RIP_AND_FINISH();
4072 IEM_MC_END();
4073 }
4074 /* The register, register encoding is invalid. */
4075 else
4076 IEMOP_RAISE_INVALID_OPCODE_RET();
4077}
4078/* Opcode 0xf3 0x0f 0x2b - invalid */
4079/* Opcode 0xf2 0x0f 0x2b - invalid */
4080
4081
4082/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4083FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4084{
4085 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4086 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4087 if (IEM_IS_MODRM_REG_MODE(bRm))
4088 {
4089 /*
4090 * Register, register.
4091 */
4092 IEM_MC_BEGIN(3, 1, 0, 0);
4093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4094 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4095 IEM_MC_LOCAL(uint64_t, u64Dst);
4096 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4097 IEM_MC_ARG(uint64_t, u64Src, 2);
4098 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4099 IEM_MC_PREPARE_FPU_USAGE();
4100 IEM_MC_FPU_TO_MMX_MODE();
4101
4102 IEM_MC_REF_MXCSR(pfMxcsr);
4103 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4104
4105 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4106 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4107 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4108 } IEM_MC_ELSE() {
4109 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4110 } IEM_MC_ENDIF();
4111
4112 IEM_MC_ADVANCE_RIP_AND_FINISH();
4113 IEM_MC_END();
4114 }
4115 else
4116 {
4117 /*
4118 * Register, memory.
4119 */
4120 IEM_MC_BEGIN(3, 2, 0, 0);
4121 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4122 IEM_MC_LOCAL(uint64_t, u64Dst);
4123 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4124 IEM_MC_ARG(uint64_t, u64Src, 2);
4125 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4126
4127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4129 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4130 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4131
4132 IEM_MC_PREPARE_FPU_USAGE();
4133 IEM_MC_FPU_TO_MMX_MODE();
4134 IEM_MC_REF_MXCSR(pfMxcsr);
4135
4136 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4137 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4138 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4139 } IEM_MC_ELSE() {
4140 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4141 } IEM_MC_ENDIF();
4142
4143 IEM_MC_ADVANCE_RIP_AND_FINISH();
4144 IEM_MC_END();
4145 }
4146}
4147
4148
4149/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        /* Destination is an MMX register, so the instruction switches to MMX mode. */
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);
        /* Source is the full 128-bit XMM register (both packed doubles). */
        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        /* The assembly helper does the conversion; the MMX result is only
           committed when no SIMD FP exception is pending in MXCSR. */
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* 128-bit memory operand with SSE alignment check (#GP on misalignment). */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        /* Destination is an MMX register, so the instruction switches to MMX mode. */
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4216
4217
4218/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4219FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4220{
4221 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4222
4223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4224 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4225 {
4226 if (IEM_IS_MODRM_REG_MODE(bRm))
4227 {
4228 /* greg64, XMM */
4229 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4230 IEM_MC_LOCAL(uint32_t, fMxcsr);
4231 IEM_MC_LOCAL(int64_t, i64Dst);
4232 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4233 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4234 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4235
4236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4237 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4238 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4239
4240 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4241 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4242 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4243 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4244 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4245 } IEM_MC_ELSE() {
4246 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4247 } IEM_MC_ENDIF();
4248
4249 IEM_MC_ADVANCE_RIP_AND_FINISH();
4250 IEM_MC_END();
4251 }
4252 else
4253 {
4254 /* greg64, [mem64] */
4255 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4256 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4257 IEM_MC_LOCAL(uint32_t, fMxcsr);
4258 IEM_MC_LOCAL(int64_t, i64Dst);
4259 IEM_MC_LOCAL(uint32_t, u32Src);
4260 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4261 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4262 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4263
4264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4266 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4267 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4268
4269 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4270 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4271 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4272 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4273 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4274 } IEM_MC_ELSE() {
4275 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4276 } IEM_MC_ENDIF();
4277
4278 IEM_MC_ADVANCE_RIP_AND_FINISH();
4279 IEM_MC_END();
4280 }
4281 }
4282 else
4283 {
4284 if (IEM_IS_MODRM_REG_MODE(bRm))
4285 {
4286 /* greg, XMM */
4287 IEM_MC_BEGIN(3, 2, 0, 0);
4288 IEM_MC_LOCAL(uint32_t, fMxcsr);
4289 IEM_MC_LOCAL(int32_t, i32Dst);
4290 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4291 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4292 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4293
4294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4295 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4296 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4297
4298 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4299 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4300 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4301 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4302 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4303 } IEM_MC_ELSE() {
4304 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4305 } IEM_MC_ENDIF();
4306
4307 IEM_MC_ADVANCE_RIP_AND_FINISH();
4308 IEM_MC_END();
4309 }
4310 else
4311 {
4312 /* greg, [mem] */
4313 IEM_MC_BEGIN(3, 4, 0, 0);
4314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4315 IEM_MC_LOCAL(uint32_t, fMxcsr);
4316 IEM_MC_LOCAL(int32_t, i32Dst);
4317 IEM_MC_LOCAL(uint32_t, u32Src);
4318 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4319 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4320 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4321
4322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4324 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4325 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4326
4327 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4328 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4329 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4330 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4331 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4332 } IEM_MC_ELSE() {
4333 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4334 } IEM_MC_ENDIF();
4335
4336 IEM_MC_ADVANCE_RIP_AND_FINISH();
4337 IEM_MC_END();
4338 }
4339 }
4340}
4341
4342
4343/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects a 64-bit general purpose destination, otherwise 32-bit. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Only commit the result when no SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg, XMM */
            IEM_MC_BEGIN(3, 2, 0, 0);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                /* 32-bit GPR store zero-extends to 64 bits in long mode. */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg32, [mem32] */
            IEM_MC_BEGIN(3, 4, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4466
4467
4468/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4469FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4470{
4471 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4473 if (IEM_IS_MODRM_REG_MODE(bRm))
4474 {
4475 /*
4476 * Register, register.
4477 */
4478 IEM_MC_BEGIN(3, 1, 0, 0);
4479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4480 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4481 IEM_MC_LOCAL(uint64_t, u64Dst);
4482 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4483 IEM_MC_ARG(uint64_t, u64Src, 2);
4484
4485 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4486 IEM_MC_PREPARE_FPU_USAGE();
4487 IEM_MC_FPU_TO_MMX_MODE();
4488
4489 IEM_MC_REF_MXCSR(pfMxcsr);
4490 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4491
4492 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4493 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4494 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4495 } IEM_MC_ELSE() {
4496 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4497 } IEM_MC_ENDIF();
4498
4499 IEM_MC_ADVANCE_RIP_AND_FINISH();
4500 IEM_MC_END();
4501 }
4502 else
4503 {
4504 /*
4505 * Register, memory.
4506 */
4507 IEM_MC_BEGIN(3, 2, 0, 0);
4508 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4509 IEM_MC_LOCAL(uint64_t, u64Dst);
4510 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4511 IEM_MC_ARG(uint64_t, u64Src, 2);
4512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4513
4514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4516 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4517 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4518
4519 IEM_MC_PREPARE_FPU_USAGE();
4520 IEM_MC_FPU_TO_MMX_MODE();
4521 IEM_MC_REF_MXCSR(pfMxcsr);
4522
4523 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4524 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4525 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4526 } IEM_MC_ELSE() {
4527 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4528 } IEM_MC_ENDIF();
4529
4530 IEM_MC_ADVANCE_RIP_AND_FINISH();
4531 IEM_MC_END();
4532 }
4533}
4534
4535
4536/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc, 2);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        /* Destination is an MMX register, so the instruction switches to MMX mode. */
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);
        /* Source is the full 128-bit XMM register (both packed doubles). */
        IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));

        /* The assembly helper does the conversion; the MMX result is only
           committed when no SIMD FP exception is pending in MXCSR. */
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_LOCAL(uint64_t, u64Dst);
        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
        IEM_MC_LOCAL(X86XMMREG, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* 128-bit memory operand with SSE alignment check (#GP on misalignment). */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        /* Destination is an MMX register, so the instruction switches to MMX mode. */
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4604
4605
4606/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4607FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4608{
4609 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4610
4611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4612 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4613 {
4614 if (IEM_IS_MODRM_REG_MODE(bRm))
4615 {
4616 /* greg64, XMM */
4617 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4618 IEM_MC_LOCAL(uint32_t, fMxcsr);
4619 IEM_MC_LOCAL(int64_t, i64Dst);
4620 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4621 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4622 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4623
4624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4625 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4626 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4627
4628 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4629 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4630 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4631 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4632 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4633 } IEM_MC_ELSE() {
4634 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4635 } IEM_MC_ENDIF();
4636
4637 IEM_MC_ADVANCE_RIP_AND_FINISH();
4638 IEM_MC_END();
4639 }
4640 else
4641 {
4642 /* greg64, [mem64] */
4643 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4645 IEM_MC_LOCAL(uint32_t, fMxcsr);
4646 IEM_MC_LOCAL(int64_t, i64Dst);
4647 IEM_MC_LOCAL(uint32_t, u32Src);
4648 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4649 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4650 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4651
4652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4654 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4655 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4656
4657 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4658 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4659 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4660 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4661 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4662 } IEM_MC_ELSE() {
4663 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4664 } IEM_MC_ENDIF();
4665
4666 IEM_MC_ADVANCE_RIP_AND_FINISH();
4667 IEM_MC_END();
4668 }
4669 }
4670 else
4671 {
4672 if (IEM_IS_MODRM_REG_MODE(bRm))
4673 {
4674 /* greg, XMM */
4675 IEM_MC_BEGIN(3, 2, 0, 0);
4676 IEM_MC_LOCAL(uint32_t, fMxcsr);
4677 IEM_MC_LOCAL(int32_t, i32Dst);
4678 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4679 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4680 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4681
4682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4683 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4684 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4685
4686 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4687 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4688 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4689 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4690 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4691 } IEM_MC_ELSE() {
4692 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4693 } IEM_MC_ENDIF();
4694
4695 IEM_MC_ADVANCE_RIP_AND_FINISH();
4696 IEM_MC_END();
4697 }
4698 else
4699 {
4700 /* greg, [mem] */
4701 IEM_MC_BEGIN(3, 4, 0, 0);
4702 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4703 IEM_MC_LOCAL(uint32_t, fMxcsr);
4704 IEM_MC_LOCAL(int32_t, i32Dst);
4705 IEM_MC_LOCAL(uint32_t, u32Src);
4706 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4707 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4708 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4709
4710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4712 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4713 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4714
4715 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4716 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4717 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4718 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4719 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4720 } IEM_MC_ELSE() {
4721 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4722 } IEM_MC_ENDIF();
4723
4724 IEM_MC_ADVANCE_RIP_AND_FINISH();
4725 IEM_MC_END();
4726 }
4727 }
4728}
4729
4730
4731/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects a 64-bit general purpose destination, otherwise 32-bit. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            /* Only commit the result when no SIMD FP exception is pending. */
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg64, [mem64] */
            IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int64_t, i64Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(3, 2, 0, 0);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG(const uint64_t *, pu64Src, 2);

            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                /* 32-bit GPR store zero-extends to 64 bits in long mode. */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* greg32, [mem64] */
            IEM_MC_BEGIN(3, 4, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, fMxcsr);
            IEM_MC_LOCAL(int32_t, i32Dst);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
            IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
            IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */

            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
            IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
            IEM_MC_IF_MXCSR_XCPT_PENDING() {
                IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
            } IEM_MC_ENDIF();

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4854
4855
/** Opcode 0x0f 0x2e - ucomiss Vss, Wss
 * Unordered scalar single-precision compare; the result is delivered through
 * EFLAGS, which is only committed when no MXCSR exception is pending. */
FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(4, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit the flags only when the helper didn't flag a SIMD FP exception in MXCSR. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3, 0, 0);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only the low dword of the memory operand is read. */
        IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit the flags only when the helper didn't flag a SIMD FP exception in MXCSR. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4923
4924
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd
 * Unordered scalar double-precision compare (SSE2); the result is delivered
 * through EFLAGS, committed only when no MXCSR exception is pending. */
FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(4, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit the flags only when the helper didn't flag a SIMD FP exception in MXCSR. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3, 0, 0);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only the low qword of the memory operand is read. */
        IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit the flags only when the helper didn't flag a SIMD FP exception in MXCSR. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4992
4993
4994/* Opcode 0xf3 0x0f 0x2e - invalid */
4995/* Opcode 0xf2 0x0f 0x2e - invalid */
4996
4997
/** Opcode 0x0f 0x2f - comiss Vss, Wss
 * Ordered scalar single-precision compare; identical structure to ucomiss,
 * differing only in the assembly helper (iemAImpl_comiss_u128). */
FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(4, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit the flags only when the helper didn't flag a SIMD FP exception in MXCSR. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3, 0, 0);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only the low dword of the memory operand is read. */
        IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit the flags only when the helper didn't flag a SIMD FP exception in MXCSR. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5065
5066
/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd
 * Ordered scalar double-precision compare (SSE2); identical structure to
 * ucomisd, differing only in the assembly helper (iemAImpl_comisd_u128). */
FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(4, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit the flags only when the helper didn't flag a SIMD FP exception in MXCSR. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3, 0, 0);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Only the low qword of the memory operand is read. */
        IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Commit the flags only when the helper didn't flag a SIMD FP exception in MXCSR. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5134
5135
5136/* Opcode 0xf3 0x0f 0x2f - invalid */
5137/* Opcode 0xf2 0x0f 0x2f - invalid */
5138
/** Opcode 0x0f 0x30 - wrmsr.
 * Deferred to the C implementation; may cause a VM-exit. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wrmsr);
}
5146
5147
/** Opcode 0x0f 0x31 - rdtsc.
 * Deferred to the C implementation; may cause a VM-exit. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtsc);
}
5155
5156
/** Opcode 0x0f 0x32 - rdmsr. (Comment previously said 0x0f 0x33; RDMSR is 0F 32.)
 * Deferred to the C implementation; may cause a VM-exit. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdmsr);
}
5164
5165
/** Opcode 0x0f 0x33 - rdpmc. (Comment previously said 0x0f 0x34; RDPMC is 0F 33.)
 * Deferred to the C implementation; may cause a VM-exit. */
FNIEMOP_DEF(iemOp_rdpmc)
{
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdpmc);
}
5173
5174
/** Opcode 0x0f 0x34 - sysenter.
 * Far, unconditional control transfer; deferred to the C implementation and
 * flagged as mode/flags-changing and TB-ending for the recompiler. */
FNIEMOP_DEF(iemOp_sysenter)
{
    IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_sysenter);
}
5184
/** Opcode 0x0f 0x35 - sysexit.
 * Far, unconditional control transfer; the effective operand size selects
 * the 32-/64-bit return variant and is passed to the C implementation. */
FNIEMOP_DEF(iemOp_sysexit)
{
    IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
}
5194
/** Opcode 0x0f 0x37 - getsec. Not implemented yet (stub). */
FNIEMOP_STUB(iemOp_getsec);
5197
5198
/** Opcode 0x0f 0x38 - escape to the three-byte 0f 38 opcode map. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /* Table is indexed by opcode byte * 4 + prefix index (none/66/F3/F2). */
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
5210
5211
/** Opcode 0x0f 0x3a - escape to the three-byte 0f 3a opcode map. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    /* Table is indexed by opcode byte * 4 + prefix index (none/66/F3/F2). */
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
5223
5224
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Notes grounded in the expansion below:
 *  - The memory form fetches the operand BEFORE evaluating the condition,
 *    so memory access faults are raised even when the condition is false.
 *  - In the 32-bit case the ELSE branch still clears the high half of the
 *    64-bit destination register, as required when a 32-bit GPR is written.
 *
 * @param a_Cnd The conditional "microcode" operation (IEM_MC_IF_EFL_*).
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
 \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
 \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
 \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2, 0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
 \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
 \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
 \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
5331
5332
5333
/** Opcode 0x0f 0x40 - cmovo: move if overflow (OF=1). */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}
5340
5341
/** Opcode 0x0f 0x41 - cmovno: move if not overflow (OF=0). */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}
5348
5349
/** Opcode 0x0f 0x42 - cmovc/cmovb: move if carry (CF=1). */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}
5356
5357
/** Opcode 0x0f 0x43 - cmovnc/cmovnb: move if not carry (CF=0). */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}
5364
5365
/** Opcode 0x0f 0x44 - cmove/cmovz: move if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}
5372
5373
/** Opcode 0x0f 0x45 - cmovne/cmovnz: move if not equal/zero (ZF=0). */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}
5380
5381
/** Opcode 0x0f 0x46 - cmovbe: move if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
5388
5389
/** Opcode 0x0f 0x47 - cmovnbe/cmova: move if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
5396
5397
/** Opcode 0x0f 0x48 - cmovs: move if sign (SF=1). */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}
5404
5405
/** Opcode 0x0f 0x49 - cmovns: move if not sign (SF=0). */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}
5412
5413
/** Opcode 0x0f 0x4a - cmovp: move if parity (PF=1). */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}
5420
5421
/** Opcode 0x0f 0x4b - cmovnp: move if not parity (PF=0). */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}
5428
5429
/** Opcode 0x0f 0x4c - cmovl: move if less (SF != OF). */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}
5436
5437
/** Opcode 0x0f 0x4d - cmovnl/cmovge: move if not less (SF == OF). */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}
5444
5445
/** Opcode 0x0f 0x4e - cmovle: move if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
5452
5453
/** Opcode 0x0f 0x4f - cmovnle/cmovg: move if greater (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
5460
5461#undef CMOV_X
5462
5463/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5464FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5465{
5466 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5468 if (IEM_IS_MODRM_REG_MODE(bRm))
5469 {
5470 /*
5471 * Register, register.
5472 */
5473 IEM_MC_BEGIN(2, 1, 0, 0);
5474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5475 IEM_MC_LOCAL(uint8_t, u8Dst);
5476 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5477 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5478 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5479 IEM_MC_PREPARE_SSE_USAGE();
5480 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5481 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5482 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5483 IEM_MC_ADVANCE_RIP_AND_FINISH();
5484 IEM_MC_END();
5485 }
5486 /* No memory operand. */
5487 else
5488 IEMOP_RAISE_INVALID_OPCODE_RET();
5489}
5490
5491
5492/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5493FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5494{
5495 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5496 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5497 if (IEM_IS_MODRM_REG_MODE(bRm))
5498 {
5499 /*
5500 * Register, register.
5501 */
5502 IEM_MC_BEGIN(2, 1, 0, 0);
5503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5504 IEM_MC_LOCAL(uint8_t, u8Dst);
5505 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5506 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5507 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5508 IEM_MC_PREPARE_SSE_USAGE();
5509 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5510 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5511 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG_8(bRm), u8Dst);
5512 IEM_MC_ADVANCE_RIP_AND_FINISH();
5513 IEM_MC_END();
5514 }
5515 /* No memory operand. */
5516 else
5517 IEMOP_RAISE_INVALID_OPCODE_RET();
5518
5519}
5520
5521
5522/* Opcode 0xf3 0x0f 0x50 - invalid */
5523/* Opcode 0xf2 0x0f 0x50 - invalid */
5524
5525
/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE FP [xmm, xmm/m128] worker with the sqrtps helper. */
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
}
5532
5533
/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE2 FP [xmm, xmm/m128] worker with the sqrtpd helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
}
5540
5541
/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE FP scalar [xmm, xmm/m32] worker with the sqrtss helper. */
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
}
5548
5549
/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE2 FP scalar [xmm, xmm/m64] worker with the sqrtsd helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
}
5556
5557
/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE FP [xmm, xmm/m128] worker with the rsqrtps helper. */
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
}
5564
5565
5566/* Opcode 0x66 0x0f 0x52 - invalid */
5567
5568
/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE FP scalar [xmm, xmm/m32] worker with the rsqrtss helper. */
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
}
5575
5576
5577/* Opcode 0xf2 0x0f 0x52 - invalid */
5578
/** Opcode 0x0f 0x53 - rcpps Vps, Wps. Not implemented yet (stub). */
FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5581/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss. Not implemented yet (stub). */
FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5584/* Opcode 0xf2 0x0f 0x53 - invalid */
5585
5586
/** Opcode 0x0f 0x54 - andps Vps, Wps */
FNIEMOP_DEF(iemOp_andps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Bitwise op - reuses the integer pand helper via the common SSE worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
}
5593
5594
/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Bitwise op - reuses the integer pand helper via the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}
5601
5602
5603/* Opcode 0xf3 0x0f 0x54 - invalid */
5604/* Opcode 0xf2 0x0f 0x54 - invalid */
5605
5606
/** Opcode 0x0f 0x55 - andnps Vps, Wps */
FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Bitwise op - reuses the integer pandn helper via the common SSE worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
}
5613
5614
/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Bitwise op - reuses the integer pandn helper via the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}
5621
5622
5623/* Opcode 0xf3 0x0f 0x55 - invalid */
5624/* Opcode 0xf2 0x0f 0x55 - invalid */
5625
5626
/** Opcode 0x0f 0x56 - orps Vps, Wps */
FNIEMOP_DEF(iemOp_orps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Bitwise op - reuses the integer por helper via the common SSE worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
}
5633
5634
/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Bitwise op - reuses the integer por helper via the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}
5641
5642
5643/* Opcode 0xf3 0x0f 0x56 - invalid */
5644/* Opcode 0xf2 0x0f 0x56 - invalid */
5645
5646
/** Opcode 0x0f 0x57 - xorps Vps, Wps */
FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Bitwise op - reuses the integer pxor helper via the common SSE worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
}
5653
5654
/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Bitwise op - reuses the integer pxor helper via the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}
5661
5662
5663/* Opcode 0xf3 0x0f 0x57 - invalid */
5664/* Opcode 0xf2 0x0f 0x57 - invalid */
5665
/** Opcode 0x0f 0x58 - addps Vps, Wps */
FNIEMOP_DEF(iemOp_addps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE FP [xmm, xmm/m128] worker with the addps helper. */
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
}
5672
5673
/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE2 FP [xmm, xmm/m128] worker with the addpd helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
}
5680
5681
/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
FNIEMOP_DEF(iemOp_addss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE FP scalar [xmm, xmm/m32] worker with the addss helper. */
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
}
5688
5689
/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE2 FP scalar [xmm, xmm/m64] worker with the addsd helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
}
5696
5697
/** Opcode 0x0f 0x59 - mulps Vps, Wps */
FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE FP [xmm, xmm/m128] worker with the mulps helper. */
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
}
5704
5705
/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE2 FP [xmm, xmm/m128] worker with the mulpd helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
}
5712
5713
/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE FP scalar [xmm, xmm/m32] worker with the mulss helper. */
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
}
5720
5721
/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE2 FP scalar [xmm, xmm/m64] worker with the mulsd helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
}
5728
5729
/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE2 FP [xmm, xmm/m128] worker with the cvtps2pd helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
}
5736
5737
/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE2 FP [xmm, xmm/m128] worker with the cvtpd2ps helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
}
5744
5745
/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
{
    IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE FP scalar [xmm, xmm/m32] worker with the cvtss2sd helper. */
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
}
5752
5753
/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
{
    IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE2 FP scalar [xmm, xmm/m64] worker with the cvtsd2ss helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
}
5760
5761
/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Route to the common SSE2 FP [xmm, xmm/m128] worker with the cvtdq2ps helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
}
5768
5769
/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Route to the common SSE2 FP [xmm, xmm/m128] worker with the cvtps2dq helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
}
5776
5777
/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps (truncating conversion). */
FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
{
    IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Route to the common SSE2 FP [xmm, xmm/m128] worker with the cvttps2dq helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
}
5784
5785
5786/* Opcode 0xf2 0x0f 0x5b - invalid */
5787
5788
/** Opcode 0x0f 0x5c - subps Vps, Wps */
FNIEMOP_DEF(iemOp_subps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE FP [xmm, xmm/m128] worker with the subps helper. */
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
}
5795
5796
/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE2 FP [xmm, xmm/m128] worker with the subpd helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
}
5803
5804
/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
FNIEMOP_DEF(iemOp_subss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE FP scalar [xmm, xmm/m32] worker with the subss helper. */
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
}
5811
5812
/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE2 FP scalar [xmm, xmm/m64] worker with the subsd helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
}
5819
5820
/** Opcode 0x0f 0x5d - minps Vps, Wps */
FNIEMOP_DEF(iemOp_minps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE FP [xmm, xmm/m128] worker with the minps helper. */
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
}
5827
5828
/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE2 FP [xmm, xmm/m128] worker with the minpd helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
}
5835
5836
/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
FNIEMOP_DEF(iemOp_minss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE FP scalar [xmm, xmm/m32] worker with the minss helper. */
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
}
5843
5844
/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    /* Route to the common SSE2 FP scalar [xmm, xmm/m64] worker with the minsd helper. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
}
5851
5852
/** Opcode 0x0f 0x5e - divps Vps, Wps
 * Packed single-precision FP divide; common SSE full128,full128 worker. */
FNIEMOP_DEF(iemOp_divps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
}
5859
5860
/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd
 * Packed double-precision FP divide; common SSE2 full128,full128 worker. */
FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
}
5867
5868
/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss
 * Scalar single-precision FP divide (low dword only); common SSE full,r32 worker. */
FNIEMOP_DEF(iemOp_divss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
}
5875
5876
/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd
 * Scalar double-precision FP divide (low qword only); common SSE2 full,r64 worker. */
FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
}
5883
5884
/** Opcode 0x0f 0x5f - maxps Vps, Wps
 * Packed single-precision FP maximum; common SSE full128,full128 worker. */
FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
}
5891
5892
/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd
 * Packed double-precision FP maximum; common SSE2 full128,full128 worker. */
FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
}
5899
5900
/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss
 * Scalar single-precision FP maximum (low dword only); common SSE full,r32 worker. */
FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
}
5907
5908
/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd
 * Scalar double-precision FP maximum (low qword only); common SSE2 full,r64 worker. */
FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
}
5915
5916
/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd
 * MMX byte interleave of the low halves; common MMX low,low worker. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
}
5923
5924
/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx
 * SSE2 byte interleave of the low halves; common SSE2 low,low worker. */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
}
5931
5932
5933/* Opcode 0xf3 0x0f 0x60 - invalid */
5934
5935
/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd
 * MMX word interleave of the low halves; common MMX low,low worker. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
}
5943
5944
/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx
 * SSE2 word interleave of the low halves; common SSE2 low,low worker. */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
}
5951
5952
5953/* Opcode 0xf3 0x0f 0x61 - invalid */
5954
5955
/** Opcode 0x0f 0x62 - punpckldq Pq, Qd
 * MMX dword interleave of the low halves; common MMX low,low worker. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
}
5962
5963
/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx
 * SSE2 dword interleave of the low halves; common SSE2 low,low worker. */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
}
5970
5971
5972/* Opcode 0xf3 0x0f 0x62 - invalid */
5973
5974
5975
/** Opcode 0x0f 0x63 - packsswb Pq, Qq
 * MMX signed word -> signed saturated byte pack; optimized common worker.
 * NOTE(review): the mnemonic below registers the 2nd operand as Qd while the
 * heading says Qq — confirm which is intended against the other pack ops. */
FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
}
5982
5983
/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx
 * SSE2 signed word -> signed saturated byte pack; optimized common worker. */
FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
}
5990
5991
5992/* Opcode 0xf3 0x0f 0x63 - invalid */
5993
5994
/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq
 * MMX packed signed byte greater-than compare; common MMX full,full worker. */
FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
}
6001
6002
/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx
 * SSE2 packed signed byte greater-than compare; common SSE2 full,full worker. */
FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
}
6009
6010
6011/* Opcode 0xf3 0x0f 0x64 - invalid */
6012
6013
/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq
 * MMX packed signed word greater-than compare; common MMX full,full worker. */
FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
}
6020
6021
/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx
 * SSE2 packed signed word greater-than compare; common SSE2 full,full worker. */
FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
}
6028
6029
6030/* Opcode 0xf3 0x0f 0x65 - invalid */
6031
6032
/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq
 * MMX packed signed dword greater-than compare; common MMX full,full worker. */
FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
}
6039
6040
/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx
 * SSE2 packed signed dword greater-than compare; common SSE2 full,full worker. */
FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
}
6047
6048
6049/* Opcode 0xf3 0x0f 0x66 - invalid */
6050
6051
/** Opcode 0x0f 0x67 - packuswb Pq, Qq
 * MMX signed word -> unsigned saturated byte pack; optimized common worker. */
FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
}
6058
6059
/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx
 * SSE2 signed word -> unsigned saturated byte pack; optimized common worker. */
FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
}
6066
6067
6068/* Opcode 0xf3 0x0f 0x67 - invalid */
6069
6070
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
 * MMX byte interleave of the high halves; common MMX high,high worker.
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}
6080
6081
/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx
 * SSE2 byte interleave of the high halves; common SSE2 high,high worker. */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
}
6088
6089
6090/* Opcode 0xf3 0x0f 0x68 - invalid */
6091
6092
/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
 * MMX word interleave of the high halves; common MMX high,high worker.
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
}
6102
6103
/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx
 * SSE2 word interleave of the high halves; common SSE2 high,high worker.
 * (Heading previously listed a VEX-style Hx operand; this legacy form
 * only takes Vx, Wx as registered by the mnemonic below.) */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);

}
6111
6112
6113/* Opcode 0xf3 0x0f 0x69 - invalid */
6114
6115
/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
 * MMX dword interleave of the high halves; common MMX high,high worker.
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
}
6125
6126
/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx
 * SSE2 dword interleave of the high halves; common SSE2 high,high worker. */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
}
6133
6134
6135/* Opcode 0xf3 0x0f 0x6a - invalid */
6136
6137
/** Opcode 0x0f 0x6b - packssdw Pq, Qd
 * MMX signed dword -> signed saturated word pack; optimized common worker. */
FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
}
6144
6145
/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx
 * SSE2 signed dword -> signed saturated word pack; optimized common worker. */
FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
}
6152
6153
6154/* Opcode 0xf3 0x0f 0x6b - invalid */
6155
6156
6157/* Opcode 0x0f 0x6c - invalid */
6158
6159
/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx
 * SSE2 qword interleave of the low halves (no MMX form exists for this opcode). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
}
6166
6167
6168/* Opcode 0xf3 0x0f 0x6c - invalid */
6169/* Opcode 0xf2 0x0f 0x6c - invalid */
6170
6171
6172/* Opcode 0x0f 0x6d - invalid */
6173
6174
/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx
 * SSE2 qword interleave of the high halves (no MMX form exists for this opcode). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
}
6181
6182
6183/* Opcode 0xf3 0x0f 0x6d - invalid */
6184
6185
/** Opcode 0x0f 0x6e - movd/movq Pd, Ey.
 * Splits on REX.W: qword greg/mem64 -> MMX reg (movq), else dword
 * greg/mem32 zero-extended into the MMX reg (movd). */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg64 */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg32 */
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem32] */
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
6292
/** Opcode 0x66 0x0f 0x6e - movd/movq Vy, Ey.
 * Splits on REX.W: qword greg/mem64 zero-extended into the XMM reg (movq),
 * else dword greg/mem32 zero-extended into the XMM reg (movd). */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
6395
6396/* Opcode 0xf3 0x0f 0x6e - invalid */
6397
6398
6399/**
6400 * @opcode 0x6f
6401 * @oppfx none
6402 * @opcpuid mmx
6403 * @opgroup og_mmx_datamove
6404 * @opxcpttype 5
6405 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6406 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6407 */
6408FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6409{
6410 IEMOP_MNEMONIC2(RM, MOVD, movd, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6411 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6412 if (IEM_IS_MODRM_REG_MODE(bRm))
6413 {
6414 /*
6415 * Register, register.
6416 */
6417 IEM_MC_BEGIN(0, 1, 0, 0);
6418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6419 IEM_MC_LOCAL(uint64_t, u64Tmp);
6420
6421 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6422 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6423 IEM_MC_FPU_TO_MMX_MODE();
6424
6425 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6426 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6427
6428 IEM_MC_ADVANCE_RIP_AND_FINISH();
6429 IEM_MC_END();
6430 }
6431 else
6432 {
6433 /*
6434 * Register, memory.
6435 */
6436 IEM_MC_BEGIN(0, 2, 0, 0);
6437 IEM_MC_LOCAL(uint64_t, u64Tmp);
6438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6439
6440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6442 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6443 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6444 IEM_MC_FPU_TO_MMX_MODE();
6445
6446 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6447 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6448
6449 IEM_MC_ADVANCE_RIP_AND_FINISH();
6450 IEM_MC_END();
6451 }
6452}
6453
/**
 * @opcode      0x6f
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * Aligned 128-bit load into an XMM register; the memory form enforces
 * 16-byte alignment (FETCH_MEM_U128_ALIGN_SSE).
 */
FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6504
/**
 * @opcode      0x6f
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * Unaligned 128-bit load into an XMM register; unlike movdqa the memory
 * form uses the plain (non-aligning) FETCH_MEM_U128.
 */
FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6552
6553
/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib
 * MMX word shuffle by immediate control byte.  Decoding requires either
 * SSE or the AMD MMX extensions (see the DONE_DECODING..._2_OR check). */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(3, 0, 0, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Note: effective address first (1 extra opcode byte for the imm8),
           then the immediate, matching the instruction's byte order. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6611
6612
/**
 * Common worker for SSE2 instructions on the forms:
 *     pshufd  xmm1, xmm2/mem128, imm8
 *     pshufhw xmm1, xmm2/mem128, imm8
 *     pshuflw xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @param   pfnWorker   The u128 shuffle implementation to invoke with
 *                      (dst, src, imm8).
 */
FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(3, 0, 0, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first (1 extra opcode byte for the imm8), then
           the immediate byte. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
6670
6671
/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib
 * SSE2 dword shuffle; common pshufXX worker. */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
}
6678
6679
/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib
 * SSE2 high-qword word shuffle; common pshufXX worker. */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
}
6686
6687
/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib
 * SSE2 low-qword word shuffle; common pshufXX worker. */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
}
6694
6695
/**
 * Common worker for MMX instructions of the form:
 *     psrlw mm, imm8
 *     psraw mm, imm8
 *     psllw mm, imm8
 *     psrld mm, imm8
 *     psrad mm, imm8
 *     pslld mm, imm8
 *     psrlq mm, imm8
 *     psllq mm, imm8
 *
 * Only the register encoding is valid; the group decoders route memory
 * encodings to the invalid-opcode handlers before reaching this worker.
 *
 * @param   bRm     The ModR/M byte (already fetched by the group decoder).
 * @param   pfnU64  The u64 shift implementation to invoke with (dst, imm8).
 */
FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        IEM_MC_BEGIN(2, 0, 0, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory not supported.
         */
        /// @todo Caller already enforced register mode?!
        AssertFailedReturn(VINF_SUCCESS);
    }
}
6740
6741
/**
 * Common worker for SSE2 instructions of the form:
 *     psrlw xmm, imm8
 *     psraw xmm, imm8
 *     psllw xmm, imm8
 *     psrld xmm, imm8
 *     psrad xmm, imm8
 *     pslld xmm, imm8
 *     psrlq xmm, imm8
 *     psllq xmm, imm8
 *
 * Only the register encoding is valid; the group decoders route memory
 * encodings to the invalid-opcode handlers before reaching this worker.
 *
 * @param   bRm      The ModR/M byte (already fetched by the group decoder).
 * @param   pfnU128  The u128 shift implementation to invoke with (dst, imm8).
 */
FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        IEM_MC_BEGIN(2, 0, 0, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        /// @todo Caller already enforced register mode?!
        AssertFailedReturn(VINF_SUCCESS);
    }
}
6782
6783
/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib
 * MMX logical right shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
}
6790
6791
/** Opcode 0x66 0x0f 0x71 11/2 - psrlw Ux, Ib
 * SSE2 logical right shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
}
6798
6799
/** Opcode 0x0f 0x71 11/4 - psraw Nq, Ib
 * MMX arithmetic right shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
}
6806
6807
/** Opcode 0x66 0x0f 0x71 11/4 - psraw Ux, Ib
 * SSE2 arithmetic right shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
}
6814
6815
/** Opcode 0x0f 0x71 11/6 - psllw Nq, Ib
 * MMX left shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
}
6822
6823
/** Opcode 0x66 0x0f 0x71 11/6 - psllw Ux, Ib
 * SSE2 left shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
}
6830
6831
/**
 * Group 12 jump table for register variant.
 *
 * Indexed by (ModR/M reg * 4) + idxPrefix; the four columns per row are the
 * SIMD prefix variants in the order none (MMX), 0x66 (SSE2), 0xf3, 0xf2
 * (see iemOp_Grp12 for the lookup).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6847
6848
6849/** Opcode 0x0f 0x71. */
6850FNIEMOP_DEF(iemOp_Grp12)
6851{
6852 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6853 if (IEM_IS_MODRM_REG_MODE(bRm))
6854 /* register, register */
6855 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6856 + pVCpu->iem.s.idxPrefix], bRm);
6857 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6858}
6859
6860
/** Opcode 0x0f 0x72 11/2. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* PSRLD mm, imm8: defer to the common MMX shift-by-immediate worker. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}
6867
6868
/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* PSRLD xmm, imm8: defer to the common SSE2 shift-by-immediate worker. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
}
6875
6876
/** Opcode 0x0f 0x72 11/4. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* PSRAD mm, imm8: defer to the common MMX shift-by-immediate worker. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}
6883
6884
/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* PSRAD xmm, imm8: defer to the common SSE2 shift-by-immediate worker. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
}
6891
6892
/** Opcode 0x0f 0x72 11/6. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* PSLLD mm, imm8: defer to the common MMX shift-by-immediate worker. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}
6899
/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* PSLLD xmm, imm8: defer to the common SSE2 shift-by-immediate worker. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
}
6906
6907
/**
 * Group 13 jump table for register variant.
 *
 * Indexed by IEM_GET_MODRM_REG_8(bRm) * 4 + pVCpu->iem.s.idxPrefix, i.e. four
 * entries per /r value, one for each SIMD prefix column (none, 0x66, 0xf3,
 * 0xf2).  Only /2, /4 and /6 with no prefix (MMX) or 0x66 (SSE) are valid.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp13_psrld_Nq_Ib,   iemOp_Grp13_psrld_Ux_Ib,  iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp13_psrad_Nq_Ib,   iemOp_Grp13_psrad_Ux_Ib,  iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp13_pslld_Nq_Ib,   iemOp_Grp13_pslld_Ux_Ib,  iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6923
6924/** Opcode 0x0f 0x72. */
6925FNIEMOP_DEF(iemOp_Grp13)
6926{
6927 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6928 if (IEM_IS_MODRM_REG_MODE(bRm))
6929 /* register, register */
6930 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6931 + pVCpu->iem.s.idxPrefix], bRm);
6932 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6933}
6934
6935
/** Opcode 0x0f 0x73 11/2. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* PSRLQ mm, imm8: defer to the common MMX shift-by-immediate worker. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}
6942
6943
/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* PSRLQ xmm, imm8: defer to the common SSE2 shift-by-immediate worker. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
}
6950
6951
/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* PSRLDQ xmm, imm8 (whole-register byte shift, SSE only - no MMX form). */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
}
6958
6959
/** Opcode 0x0f 0x73 11/6. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* PSLLQ mm, imm8: defer to the common MMX shift-by-immediate worker. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}
6966
6967
/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* PSLLQ xmm, imm8: defer to the common SSE2 shift-by-immediate worker. */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
}
6974
6975
/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* PSLLDQ xmm, imm8 (whole-register byte shift, SSE only - no MMX form). */
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
}
6982
/**
 * Group 14 jump table for register variant.
 *
 * Indexed by IEM_GET_MODRM_REG_8(bRm) * 4 + pVCpu->iem.s.idxPrefix, i.e. four
 * entries per /r value, one for each SIMD prefix column (none, 0x66, 0xf3,
 * 0xf2).  Unlike groups 12/13, the /3 (PSRLDQ) and /7 (PSLLDQ) rows only
 * exist with the 0x66 prefix - there is no MMX form of the byte shifts.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp14_psrlq_Nq_Ib,     iemOp_Grp14_psrlq_Ux_Ib,   iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib,  iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp14_psllq_Nq_Ib,     iemOp_Grp14_psllq_Ux_Ib,   iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib,  iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6998
6999
7000/** Opcode 0x0f 0x73. */
7001FNIEMOP_DEF(iemOp_Grp14)
7002{
7003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7004 if (IEM_IS_MODRM_REG_MODE(bRm))
7005 /* register, register */
7006 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7007 + pVCpu->iem.s.idxPrefix], bRm);
7008 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7009}
7010
7011
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Defer to the common "full, full -> full" MMX worker with the byte-compare implementation. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}
7018
7019
/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Defer to the common "full, full -> full" SSE2 worker with the byte-compare implementation. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}
7026
7027
7028/* Opcode 0xf3 0x0f 0x74 - invalid */
7029/* Opcode 0xf2 0x0f 0x74 - invalid */
7030
7031
/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Defer to the common "full, full -> full" MMX worker with the word-compare implementation. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}
7038
7039
/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Defer to the common "full, full -> full" SSE2 worker with the word-compare implementation. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
}
7046
7047
7048/* Opcode 0xf3 0x0f 0x75 - invalid */
7049/* Opcode 0xf2 0x0f 0x75 - invalid */
7050
7051
/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Defer to the common "full, full -> full" MMX worker with the dword-compare implementation. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
}
7058
7059
/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Defer to the common "full, full -> full" SSE2 worker with the dword-compare implementation. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
}
7066
7067
7068/* Opcode 0xf3 0x0f 0x76 - invalid */
7069/* Opcode 0xf2 0x0f 0x76 - invalid */
7070
7071
/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_DEF(iemOp_emms)
{
    IEMOP_MNEMONIC(emms, "emms");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();  /* #NM checks before touching the FPU state. */
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Leave MMX mode (EMMS empties the x87 tag word per Intel SDM). */
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7085
7086/* Opcode 0x66 0x0f 0x77 - invalid */
7087/* Opcode 0xf3 0x0f 0x77 - invalid */
7088/* Opcode 0xf2 0x0f 0x77 - invalid */
7089
/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    /* Operand size is fixed: 64-bit in long mode, 32-bit otherwise. */
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         *
         * The VMCS field encoding comes from the ModRM reg field (Gy), the
         * destination from the r/m field (Ey).
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t,   u64Enc,  1);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            /* The C worker may trigger a VM-exit and updates the status flags. */
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t,   u32Enc,  1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg32, pu64Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.  The effective address is calculated before the
         * done-decoding check, matching the decode order of the other forms.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc,   2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, 0, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc,   2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
#endif
7166
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17); /**< @todo not implemented - decoder stub raising invalid opcode handling. */
7169/* Opcode 0xf3 0x0f 0x78 - invalid */
7170/* Opcode 0xf2 0x0f 0x78 - invalid */
7171
/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    /* Operand size is fixed: 64-bit in long mode, 32-bit otherwise. */
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         *
         * The VMCS field encoding comes from the ModRM reg field (Gy), the
         * value to write from the r/m field (Ey).
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            /* The C worker may trigger a VM-exit and updates the status flags. */
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.  The effective address is calculated before the
         * done-decoding check, matching the decode order of the other forms.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc,   2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, 0, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc,   2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
#endif
7248/* Opcode 0x66 0x0f 0x79 - invalid */
7249/* Opcode 0xf3 0x0f 0x79 - invalid */
7250/* Opcode 0xf2 0x0f 0x79 - invalid */
7251
7252/* Opcode 0x0f 0x7a - invalid */
7253/* Opcode 0x66 0x0f 0x7a - invalid */
7254/* Opcode 0xf3 0x0f 0x7a - invalid */
7255/* Opcode 0xf2 0x0f 0x7a - invalid */
7256
7257/* Opcode 0x0f 0x7b - invalid */
7258/* Opcode 0x66 0x0f 0x7b - invalid */
7259/* Opcode 0xf3 0x0f 0x7b - invalid */
7260/* Opcode 0xf2 0x0f 0x7b - invalid */
7261
7262/* Opcode 0x0f 0x7c - invalid */
7263
7264
/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Defer to the common SSE3 floating-point "full, full -> full" worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
}
7271
7272
7273/* Opcode 0xf3 0x0f 0x7c - invalid */
7274
7275
/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Defer to the common SSE3 floating-point "full, full -> full" worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}
7282
7283
7284/* Opcode 0x0f 0x7d - invalid */
7285
7286
/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Defer to the common SSE3 floating-point "full, full -> full" worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
}
7293
7294
7295/* Opcode 0xf3 0x0f 0x7d - invalid */
7296
7297
/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Defer to the common SSE3 floating-point "full, full -> full" worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
}
7304
7305
/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects the 64-bit MOVQ form, otherwise 32-bit MOVD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX */
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
7413
7414
/** Opcode 0x66 0x0f 0x7e - movd/movq Ey, Vy (SSE2 variant). */
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* REX.W selects the 64-bit MOVQ form, otherwise 32-bit MOVD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
7517
/**
 * @opcode      0x7e
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  none
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Vq_Wq)
{
    /* Loads 64 bits into the low qword and zero-extends to the full 128-bit register. */
    IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM64.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem64].
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7570
7571/* Opcode 0xf2 0x0f 0x7e - invalid */
7572
7573
/** Opcode 0x0f 0x7f - movq Qq, Pq */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        /* Source is the ModRM reg field, destination the r/m field (MR form). */
        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], MMX.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7621
/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.  Aligned store - the _ALIGN_SSE variant faults on
         * a misaligned effective address (MOVDQA semantics).
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7662
/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.  Unaligned store - no alignment check, unlike MOVDQA.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7703
7704/* Opcode 0xf2 0x0f 0x7f - invalid */
7705
7706
7707
/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Branch taken when EFLAGS.OF is set. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7739
7740
/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Inverted condition: branch taken when EFLAGS.OF is clear (ELSE arm). */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7772
7773
/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Branch taken when EFLAGS.CF is set. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7805
7806
/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Inverted condition: branch taken when EFLAGS.CF is clear (ELSE arm). */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7838
7839
/** Opcode 0x0f 0x84 - je/jz Jv.  Jump near if ZF=1. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386(); /* two-byte Jcc requires a 386 or later */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);   /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();         /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7871
7872
/** Opcode 0x0f 0x85 - jne/jnz Jv.  Jump near if ZF=0.
 *  Note: the MC block tests the inverse condition (ZF set => not taken). */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386(); /* two-byte Jcc requires a 386 or later */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();         /* ZF set: not taken */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);   /* ZF clear: taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7904
7905
/** Opcode 0x0f 0x86 - jbe/jna Jv.  Jump near if CF=1 or ZF=1 (unsigned below-or-equal). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386(); /* two-byte Jcc requires a 386 or later */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);   /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();         /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7937
7938
/** Opcode 0x0f 0x87 - jnbe/ja Jv.  Jump near if CF=0 and ZF=0 (unsigned above).
 *  Note: the MC block tests the inverse condition (CF or ZF set => not taken). */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386(); /* two-byte Jcc requires a 386 or later */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();         /* CF or ZF set: not taken */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);   /* both clear: taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
7970
7971
/** Opcode 0x0f 0x88 - js Jv.  Jump near if SF=1. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386(); /* two-byte Jcc requires a 386 or later */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);   /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();         /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8003
8004
/** Opcode 0x0f 0x89 - jns Jv.  Jump near if SF=0.
 *  Note: the MC block tests the inverse condition (SF set => not taken). */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386(); /* two-byte Jcc requires a 386 or later */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();         /* SF set: not taken */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);   /* SF clear: taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8036
8037
/** Opcode 0x0f 0x8a - jp Jv.  Jump near if PF=1. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386(); /* two-byte Jcc requires a 386 or later */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);   /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();         /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8069
8070
/** Opcode 0x0f 0x8b - jnp Jv.  Jump near if PF=0.
 *  Note: the MC block tests the inverse condition (PF set => not taken). */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386(); /* two-byte Jcc requires a 386 or later */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();         /* PF set: not taken */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);   /* PF clear: taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8102
8103
/** Opcode 0x0f 0x8c - jl/jnge Jv.  Jump near if SF != OF (signed less). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386(); /* two-byte Jcc requires a 386 or later */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);   /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();         /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8135
8136
/** Opcode 0x0f 0x8d - jnl/jge Jv.  Jump near if SF == OF (signed greater-or-equal).
 *  Note: the MC block tests the inverse condition (SF != OF => not taken). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386(); /* two-byte Jcc requires a 386 or later */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();         /* SF != OF: not taken */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);   /* SF == OF: taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8168
8169
/** Opcode 0x0f 0x8e - jle/jng Jv.  Jump near if ZF=1 or SF != OF (signed less-or-equal). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386(); /* two-byte Jcc requires a 386 or later */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);   /* taken */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();         /* not taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8201
8202
/** Opcode 0x0f 0x8f - jnle/jg Jv.  Jump near if ZF=0 and SF == OF (signed greater).
 *  Note: the MC block tests the inverse condition (ZF set or SF != OF => not taken). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386(); /* two-byte Jcc requires a 386 or later */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();         /* condition false: not taken */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);   /* taken */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size (also the 64-bit default): signed 32-bit displacement. */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
8234
8235
/** Opcode 0x0f 0x90 - seto Eb.  Set byte to 1 if OF=1, else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8275
8276
/** Opcode 0x0f 0x91 - setno Eb.  Set byte to 1 if OF=0, else 0 (stores are inverted vs. seto). */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8316
8317
/** Opcode 0x0f 0x92 - setc Eb.  Set byte to 1 if CF=1, else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8357
8358
/** Opcode 0x0f 0x93 - setnc Eb.  Set byte to 1 if CF=0, else 0 (stores are inverted vs. setc). */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8398
8399
/** Opcode 0x0f 0x94 - sete Eb.  Set byte to 1 if ZF=1, else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8439
8440
/** Opcode 0x0f 0x95 - setne Eb.  Set byte to 1 if ZF=0, else 0 (stores are inverted vs. sete). */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8480
8481
/** Opcode 0x0f 0x96 - setbe Eb.  Set byte to 1 if CF=1 or ZF=1 (unsigned below-or-equal), else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8521
8522
/** Opcode 0x0f 0x97 - setnbe Eb.  Set byte to 1 if CF=0 and ZF=0 (unsigned above), else 0
 *  (stores are inverted vs. setbe). */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8562
8563
/** Opcode 0x0f 0x98 - sets Eb.  Set byte to 1 if SF=1, else 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8603
8604
/** Opcode 0x0f 0x99 - setns Eb.  Set byte to 1 if SF=0, else 0 (stores are inverted vs. sets). */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8644
8645
/** Opcode 0x0f 0x9a - setp Eb.  Set byte to 1 if PF=1, else 0. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8685
8686
/** Opcode 0x0f 0x9b - setnp Eb.  Set byte to 1 if PF=0, else 0 (stores are inverted vs. setp). */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8726
8727
/** Opcode 0x0f 0x9c - setl Eb.  Set byte to 1 if SF != OF (signed less), else 0. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8767
8768
/** Opcode 0x0f 0x9d - setnl Eb.  Set byte to 1 if SF == OF (signed greater-or-equal), else 0
 *  (stores are inverted vs. setl). */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8808
8809
/** Opcode 0x0f 0x9e - setle Eb.  Set byte to 1 if ZF=1 or SF != OF (signed less-or-equal), else 0. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8849
8850
/** Opcode 0x0f 0x9f - setnle/setg Eb.
 *
 * Stores 1 in the byte operand when ZF is clear and SF == OF (signed
 * greater-than), otherwise 0.  This is the inverse of setle: the same
 * EFLAGS test is used with the 0/1 stores swapped.  Only the mod and r/m
 * fields of the ModR/M byte are used to select the destination; see the
 * @todo about the reg field. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8890
8891
/** Opcode 0x0f 0xa0 - push fs.
 *
 * No ModR/M byte; defers to the common segment register push worker. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
8900
8901
/** Opcode 0x0f 0xa1 - pop fs.
 *
 * No ModR/M byte; defers to the generic segment register pop
 * implementation with the current effective operand size. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
8910
8911
/** Opcode 0x0f 0xa2 - cpuid.
 *
 * Deferred entirely to iemCImpl_cpuid; flagged IEM_CIMPL_F_VMEXIT since the
 * instruction can trigger a VM-exit when executed under a hypervisor. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_cpuid);
}
8920
8921
/**
 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 *
 * Read-write variant: handles the register destination and the unlocked
 * memory destination.  Note that the macro deliberately ends inside an
 * unterminated 'else { (void)0' branch - a trailing
 * IEMOP_BODY_BIT_Ev_Gv_LOCKED invocation is required to supply the
 * LOCK-prefixed memory case and close the statement (split to work around
 * an IEMAllInstPython.py parsing issue).
 *
 * For memory destinations the bit offset in Gv is treated as signed: the
 * arithmetically shifted (SAR) offset adjusts the effective address in
 * operand-size units before the offset is masked down to a bit within the
 * accessed word (0x0f/0x1f/0x3f).  OF, SF, ZF, AF and PF are declared
 * undefined for verification purposes.
 */

#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int16_t, i16AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int32_t, i32AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 5, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int64_t, i64AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
   This is the second half of IEMOP_BODY_BIT_Ev_Gv_RW: it supplies the
   LOCK-prefixed memory-destination case (using the a_fnLockedUxx workers)
   and closes the 'else' branch the RW macro leaves open. */
#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int16_t, i16AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int32_t, i32AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int64_t, i64AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
9182
/* Read-only version (bt).  Same structure as IEMOP_BODY_BIT_Ev_Gv_RW but the
   destination is only read (const refs / RO mappings) since BT merely sets CF,
   and a LOCK prefix raises #UD instead of selecting a locked worker.  Unlike
   the RW variant this macro is complete on its own. */
#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
                IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
                IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
                IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int16_t, i16AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
                    IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
                    IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
                    IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
                    IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int32_t, i32AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
                    IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
                    IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
                    IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
                    IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(int64_t, i64AddrAdj); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
                    IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
                    IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
                    IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
                    IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
9349
9350
/** Opcode 0x0f 0xa3 - bt Ev,Gv.
 *
 * Read-only bit test: uses the RO body macro, which is self-contained (no
 * locked variant; a LOCK prefix raises \#UD). */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
}
9358
9359
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Decodes ModR/M plus a trailing shift-count immediate and dispatches to the
 * 16/32/64-bit worker from the supplied implementation table.
 *
 * @param   pImpl   The shld/shrd implementation table (selected by the
 *                  caller, typically per guest CPU EFLAGS behaviour).
 *
 * AF and OF are declared undefined for verification purposes.  No LOCK
 * prefix is allowed.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: the immediate directly follows ModR/M, so it
           can be fetched up front and baked in as a constant argument. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: the effective address calc is told one
           immediate byte follows (the 1 argument), then the shift count is
           fetched before decoding is declared done. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9509
9510
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Like iemOpCommonShldShrd_Ib, but the shift count is read at execution time
 * from CL (low byte of rCX) instead of an immediate, so there is no extra
 * opcode byte to decode.
 *
 * @param   pImpl   The shld/shrd implementation table (selected by the
 *                  caller, typically per guest CPU EFLAGS behaviour).
 *
 * AF and OF are declared undefined for verification purposes.  No LOCK
 * prefix is allowed.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9658
9659
9660
/** Opcode 0x0f 0xa4 - shld Ev,Gv,Ib.
 *
 * Immediate-count double-precision left shift; the EFLAGS-behaviour variant
 * is picked per target CPU via IEMTARGETCPU_EFL_BEHAVIOR_SELECT. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
9668
9669
/** Opcode 0x0f 0xa5 - shld Ev,Gv,CL.
 *
 * CL-count double-precision left shift; the EFLAGS-behaviour variant is
 * picked per target CPU via IEMTARGETCPU_EFL_BEHAVIOR_SELECT. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
9677
9678
/** Opcode 0x0f 0xa8 - push gs.
 *
 * No ModR/M byte; defers to the common segment register push worker. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
9687
9688
/** Opcode 0x0f 0xa9 - pop gs.
 *
 * No ModR/M byte; defers to the generic segment register pop
 * implementation with the current effective operand size. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
9697
9698
/** Opcode 0x0f 0xaa - rsm.
 *
 * Resume from system management mode.  Deferred to iemCImpl_rsm; flagged as
 * a far indirect branch that can change CPU mode and RFLAGS, may cause a
 * VM-exit, and ends the current translation block. */
FNIEMOP_DEF(iemOp_rsm)
{
    IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
    IEMOP_HLP_MIN_386(); /* 386SL and later. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_rsm);
}
9709
9710
9711
/** Opcode 0x0f 0xab - bts Ev,Gv.
 *
 * Bit test and set.  The two body macros below form one statement: _RW
 * covers the register and unlocked memory cases and deliberately leaves an
 * 'else' branch open, which _LOCKED completes with the LOCK-prefixed
 * memory workers. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
}
9720
9721
/**
 * @opcode 0x0f 0xac
 *
 * shrd Ev,Gv,Ib - double precision shift right, immediate shift count.
 * Same common worker as shld, just with the shrd EFLAGS table.
 */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
9729
9730
/**
 * @opcode 0x0f 0xad
 *
 * shrd Ev,Gv,CL - double precision shift right, shift count taken from CL.
 */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
9738
9739
/**
 * Opcode 0x0f 0xae mem/0 - fxsave m512.
 *
 * Saves the x87/MMX/SSE state to a 512-byte memory area.  Raises \#UD when
 * the guest CPU profile lacks FXSAVE/FXRSTOR support; the actual save is
 * done by iemCImpl_fxsave.
 */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ(); /* only reading the FPU state, not modifying it */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9758
9759
/**
 * Opcode 0x0f 0xae mem/1 - fxrstor m512.
 *
 * Restores the x87/MMX/SSE state from a 512-byte memory area.  Raises \#UD
 * when the guest CPU profile lacks FXSAVE/FXRSTOR support; the actual
 * restore is done by iemCImpl_fxrstor.
 */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* unlike fxsave, this instruction modifies the FPU state */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9778
9779
/**
 * @opmaps      grp15
 * @opcode      !11/2
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_mxcsrsm
 * @opxcpttype  5
 * @optest      op1=0      -> mxcsr=0
 * @optest      op1=0x2083 -> mxcsr=0x2083
 * @optest      op1=0xfffffffe -> value.xcpt=0xd
 * @optest      op1=0x2083 cr0|=ts -> value.xcpt=0x7
 * @optest      op1=0x2083 cr0|=em -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=mp -> mxcsr=0x2083
 * @optest      op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 *
 * ldmxcsr Md - loads MXCSR from a 32-bit memory operand via
 * iemCImpl_ldmxcsr.  Raises \#UD when the guest lacks SSE.
 */
FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): state actualized FOR_READ although ldmxcsr writes MXCSR;
       presumably the CIMPL takes care of marking the change - confirm. */
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9815
9816
/**
 * @opmaps      grp15
 * @opcode      !11/3
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_mxcsrsm
 * @opxcpttype  5
 * @optest      mxcsr=0      -> op1=0
 * @optest      mxcsr=0x2083 -> op1=0x2083
 * @optest      mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
 * @optest      mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
 * @optest      mxcsr=0x2086 cr0|=mp -> op1=0x2086
 * @optest      mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
 * @optest      mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 *
 * stmxcsr Md - stores MXCSR to a 32-bit memory operand via
 * iemCImpl_stmxcsr.  Raises \#UD when the guest lacks SSE.
 */
FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* only reads MXCSR */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9851
9852
/**
 * @opmaps      grp15
 * @opcode      !11/4
 * @oppfx       none
 * @opcpuid     xsave
 * @opgroup     og_system
 * @opxcpttype  none
 *
 * xsave mem - saves the enabled extended processor states to memory via
 * iemCImpl_xsave.  Raises \#UD when the guest lacks XSAVE/XRSTOR support.
 */
FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ(); /* saving only reads the state */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9878
9879
/**
 * @opmaps      grp15
 * @opcode      !11/5
 * @oppfx       none
 * @opcpuid     xsave
 * @opgroup     og_system
 * @opxcpttype  none
 *
 * xrstor mem - restores the enabled extended processor states from memory
 * via iemCImpl_xrstor.  Raises \#UD when the guest lacks XSAVE/XRSTOR.
 */
FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): actualized FOR_READ although xrstor changes the state
       (contrast fxrstor which uses FOR_CHANGE) - confirm intentional. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
}
9905
/** Opcode 0x0f 0xae mem/6 - xsaveopt; not implemented yet (stub raises the
 *  standard not-implemented status). */
FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9908
/**
 * @opmaps      grp15
 * @opcode      !11/7
 * @oppfx       none
 * @opcpuid     clfsh
 * @opgroup     og_cachectl
 * @optest      op1=1 ->
 *
 * clflush Mb - flushes the cache line containing the memory operand.
 * Shares iemCImpl_clflush_clflushopt with clflushopt.  When the guest CPU
 * lacks CLFLUSH, decoding falls through to the invalid-with-ModRM handler
 * (not a plain \#UD, since the operand bytes still need consuming).
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9932
/**
 * @opmaps      grp15
 * @opcode      !11/7
 * @oppfx       0x66
 * @opcpuid     clflushopt
 * @opgroup     og_cachectl
 * @optest      op1=1 ->
 *
 * clflushopt Mb - optimized cache line flush (0x66-prefixed clflush
 * encoding).  Same CIMPL worker as clflush; falls through to the
 * invalid-with-ModRM handler when the feature is absent.
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
9956
9957
/**
 * Opcode 0x0f 0xae 11b/5 - lfence (load fence).
 *
 * On ARM64 hosts a native fence helper is always available; on other hosts
 * the SSE2 lfence helper is used when the host CPU has SSE2, otherwise a
 * generic alternative memory-fence helper is substituted.
 */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm); /* ModRM reg/rm bits already fully consumed by the group decoder */
    IEMOP_MNEMONIC(lfence, "lfence");
    IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* guest must have SSE2 */
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9976
9977
/**
 * Opcode 0x0f 0xae 11b/6 - mfence (full memory fence).
 *
 * Host dispatch mirrors lfence: native helper on ARM64, SSE2 mfence when
 * the host supports it, generic fallback otherwise.
 */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9996
9997
/**
 * Opcode 0x0f 0xae 11b/7 - sfence (store fence).
 *
 * Host dispatch mirrors lfence/mfence: native helper on ARM64, SSE2 sfence
 * when the host supports it, generic fallback otherwise.
 */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
#ifdef RT_ARCH_ARM64
    IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
#else
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10016
10017
/**
 * Opcode 0xf3 0x0f 0xae 11b/0 - rdfsbase Ry.
 *
 * Reads the FS segment base into the ModRM r/m register: the full 64-bit
 * base with a REX.W operand size, otherwise the low 32 bits.  The
 * FSGSBASE-enable check is done by IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT.
 */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: only the low dword of the base is read. */
        IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10045
10046
/**
 * Opcode 0xf3 0x0f 0xae 11b/1 - rdgsbase Ry.
 *
 * Reads the GS segment base into the ModRM r/m register; identical in
 * structure to rdfsbase, just targeting GS.
 */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: only the low dword of the base is read. */
        IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10074
10075
/**
 * Opcode 0xf3 0x0f 0xae 11b/2 - wrfsbase Ry.
 *
 * Writes the FS segment base from the ModRM r/m register.  The 64-bit form
 * raises \#GP(0) for non-canonical addresses; the 32-bit form stores a
 * zero-extended dword into the 64-bit base (always canonical).
 */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst); /* #GP(0) on non-canonical base */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst); /* zero-extended; no canonical check needed */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10104
10105
/**
 * Opcode 0xf3 0x0f 0xae 11b/3 - wrgsbase Ry.
 *
 * Writes the GS segment base from the ModRM r/m register; identical in
 * structure to wrfsbase, just targeting GS.
 */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst); /* #GP(0) on non-canonical base */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst); /* zero-extended; no canonical check needed */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10134
10135
/**
 * Group 15 jump table for the register (mod=11b) variant.
 *
 * Indexed by ModRM.reg * 4 + prefix index (none, 0x66, 0xf3, 0xf2); the
 * F3-prefixed column carries the rd/wr fs/gs base instructions, the
 * unprefixed /5../7 rows the fence instructions.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdfsbase,           iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdgsbase,           iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrfsbase,           iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrgsbase,           iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4); /* 8 /r values x 4 prefix columns */
10151
10152
/**
 * Group 15 jump table for the memory (mod!=11b) variant.
 *
 * Indexed by ModRM.reg * 4 + prefix index (none, 0x66, 0xf3, 0xf2); the
 * unprefixed column carries the save/restore and cache-control
 * instructions, with clflushopt on the 0x66-prefixed /7 entry.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ iemOp_Grp15_fxsave,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /3 */ iemOp_Grp15_stmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave,             iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt,          iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush,           iemOp_Grp15_clflushopt,         iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4); /* 8 /r values x 4 prefix columns */
10168
10169
/**
 * Opcode 0x0f 0xae - group 15 dispatcher.
 *
 * Fetches the ModRM byte and dispatches through one of the two jump tables
 * above, selected by the mod field; the table index combines ModRM.reg
 * with the last prefix seen (pVCpu->iem.s.idxPrefix).
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                  + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                              + pVCpu->iem.s.idxPrefix], bRm);
}
10183
10184
/**
 * @opcode 0x0f 0xaf
 *
 * imul Gv,Ev - two operand signed multiply.  SF/ZF/AF/PF are undefined
 * after this instruction, which the verification mode is told about; the
 * shared binary-op body handles the reg and mem forms for all sizes.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_MIN_386);
}
10194
10195
/**
 * @opcode 0x0f 0xb0
 *
 * cmpxchg Eb,Gb - byte compare and exchange against AL.
 *
 * Register form references both operands and AL directly; memory form maps
 * the destination read/write, runs the (possibly LOCK-prefixed) assembly
 * worker on a local AL copy, then commits memory, EFLAGS and AL.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operate directly on the guest registers. */
        IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it R/W and work on a local AL copy. */
        IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* AL is written back unconditionally with whatever value the worker
           left in the local copy. */
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10254
/**
 * @opcode 0x0f 0xb1
 *
 * cmpxchg Ev,Gv - word/dword/qword compare and exchange against
 * AX/EAX/RAX.
 *
 * Layout mirrors the byte variant: the register form works directly on
 * guest registers, the memory form maps the destination R/W and uses a
 * local accumulator copy committed afterwards.  The 64-bit case has an
 * RT_ARCH_X86 special path passing the source by reference, since a 32-bit
 * host worker takes a 64-bit source via pointer rather than by value.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* In 64-bit mode a 32-bit write clears the high dword of
                   the destination register.  Only the register that was
                   actually written needs clearing: the destination on
                   match (ZF set), EAX on mismatch. */
                IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                } IEM_MC_ELSE() {
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                } IEM_MC_ENDIF();

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2); /* 32-bit host: pass 64-bit source by reference */
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); /* unconditional write-back of the local copy */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);

                /* EAX only written on mismatch (ZF clear), so the high
                   dword clearing happens via the 32-bit register store. */
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                    IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                } IEM_MC_ENDIF();

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 4, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2); /* 32-bit host: pass 64-bit source by reference */
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); /* unconditional write-back of the local copy */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10461
10462
/**
 * @opcode 0x0f 0xb2
 *
 * lss Gv,Mp - loads SS:reg from a far pointer in memory.  The register
 * form of the ModRM byte is invalid (\#UD); the actual work is done by the
 * common far-pointer load worker.
 */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* memory operand required */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
10473
10474
/**
 * @opcode 0x0f 0xb3
 *
 * btr Ev,Gv - bit test and reset; same shared bit-op body as bts, using
 * the btr workers for both plain and LOCK-prefixed variants.
 */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv_RW(    iemAImpl_btr_u16,        iemAImpl_btr_u32,        iemAImpl_btr_u64);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
}
10483
10484
/**
 * @opcode 0x0f 0xb4
 *
 * lfs Gv,Mp - loads FS:reg from a far pointer in memory; register form
 * raises \#UD, work done by the common far-pointer load worker.
 */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* memory operand required */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
10495
10496
/**
 * @opcode 0x0f 0xb5
 *
 * lgs Gv,Mp - loads GS:reg from a far pointer in memory; register form
 * raises \#UD, work done by the common far-pointer load worker.
 */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* memory operand required */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
10507
10508
/**
 * @opcode 0x0f 0xb6
 *
 * movzx Gv,Eb - zero-extending byte move into a 16/32/64-bit register.
 * One case per effective operand size, each fetching the byte (from
 * register or memory) with zero extension and storing to the ModRM reg.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10604
10605
/** Opcode 0x0f 0xb7.
 * MOVZX Gv,Ew - load Gv with the zero-extended 16-bit register/memory operand. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */

    /** @todo There should be no difference in the behaviour whether REX.W is
     *        present or not... */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * Note: 16-bit and 32-bit effective operand sizes share the U32 path
     *       below; only 64-bit gets its own zero-extend-to-64 path.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
10678
10679
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF).
 * Stubbed as undefined-opcode (per the UD stub macro); IA-64 JMPE has no
 * meaning on x86 guests. */
FNIEMOP_UD_STUB(iemOp_jmpe);
10682
10683
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev
 * Counts set bits in Ev and stores the result in Gv.  Decodes as invalid
 * when the guest CPU profile does not advertise POPCNT. */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Guest feature gate: no POPCNT -> treat as invalid opcode (still eats modr/m). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    /* Native assembly helpers, usable when the host itself has POPCNT. */
    static const IEMOPBINSIZES s_Native =
    { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
# endif
    /* Portable C fallbacks for hosts without POPCNT (or non-x86 hosts). */
    static const IEMOPBINSIZES s_Fallback =
    { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
#endif
    const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
}
10701
10702
10703/**
10704 * @opcode 0xb9
10705 * @opinvalid intel-modrm
10706 * @optest ->
10707 */
10708FNIEMOP_DEF(iemOp_Grp10)
10709{
10710 /*
10711 * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
10712 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10713 */
10714 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10715 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10716 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10717}
10718
10719
10720/**
10721 * Body for group 8 bit instruction.
10722 */
10723#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10724 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10725 \
10726 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10727 { \
10728 /* register destination. */ \
10729 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10730 \
10731 switch (pVCpu->iem.s.enmEffOpSize) \
10732 { \
10733 case IEMMODE_16BIT: \
10734 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10736 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10737 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10738 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10739 \
10740 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10741 IEM_MC_REF_EFLAGS(pEFlags); \
10742 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10743 \
10744 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10745 IEM_MC_END(); \
10746 break; \
10747 \
10748 case IEMMODE_32BIT: \
10749 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10751 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10752 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10753 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10754 \
10755 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10756 IEM_MC_REF_EFLAGS(pEFlags); \
10757 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10758 \
10759 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
10760 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10761 IEM_MC_END(); \
10762 break; \
10763 \
10764 case IEMMODE_64BIT: \
10765 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
10766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10767 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10768 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10769 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10770 \
10771 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10772 IEM_MC_REF_EFLAGS(pEFlags); \
10773 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10774 \
10775 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10776 IEM_MC_END(); \
10777 break; \
10778 \
10779 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10780 } \
10781 } \
10782 else \
10783 { \
10784 /* memory destination. */ \
10785 /** @todo test negative bit offsets! */ \
10786 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10787 { \
10788 switch (pVCpu->iem.s.enmEffOpSize) \
10789 { \
10790 case IEMMODE_16BIT: \
10791 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10792 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10793 IEM_MC_ARG(uint16_t, u16Src, 1); \
10794 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10796 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10797 \
10798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10799 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10800 IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
10801 IEMOP_HLP_DONE_DECODING(); \
10802 IEM_MC_FETCH_EFLAGS(EFlags); \
10803 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10804 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10805 \
10806 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
10807 IEM_MC_COMMIT_EFLAGS(EFlags); \
10808 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10809 IEM_MC_END(); \
10810 break; \
10811 \
10812 case IEMMODE_32BIT: \
10813 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10814 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10815 IEM_MC_ARG(uint32_t, u32Src, 1); \
10816 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10818 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10819 \
10820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10821 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10822 IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
10823 IEMOP_HLP_DONE_DECODING(); \
10824 IEM_MC_FETCH_EFLAGS(EFlags); \
10825 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10826 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10827 \
10828 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
10829 IEM_MC_COMMIT_EFLAGS(EFlags); \
10830 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10831 IEM_MC_END(); \
10832 break; \
10833 \
10834 case IEMMODE_64BIT: \
10835 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
10836 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10837 IEM_MC_ARG(uint64_t, u64Src, 1); \
10838 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10840 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10841 \
10842 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10843 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10844 IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
10845 IEMOP_HLP_DONE_DECODING(); \
10846 IEM_MC_FETCH_EFLAGS(EFlags); \
10847 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10848 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10849 \
10850 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
10851 IEM_MC_COMMIT_EFLAGS(EFlags); \
10852 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10853 IEM_MC_END(); \
10854 break; \
10855 \
10856 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10857 } \
10858 } \
10859 else \
10860 { \
10861 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
 * Completes IEMOP_BODY_BIT_Ev_Ib_RW: supplies the LOCK-prefixed memory
 * destination path (a_fnLockedUxx helpers) and closes the braces the RW
 * macro leaves open.  Must directly follow an IEMOP_BODY_BIT_Ev_Ib_RW
 * expansion. */
#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 still to be fetched */ \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
10937
/* Read-only version (bt): the bit is only tested, never written, so the
 * operand is referenced/mapped const and a LOCK prefix is rejected.  Unlike
 * the RW macro this one is self-contained (closes all its braces). */
#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
    \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register destination. */ \
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
        \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); /* offset masked to 0..15 */ \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); /* offset masked to 0..31 */ \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); /* offset masked to 0..63 */ \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory destination. */ \
        /** @todo test negative bit offsets! */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 still to be fetched */ \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
                    IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* BT never writes, so LOCK is always invalid here. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
11081
11082
/** Opcode 0x0f 0xba /4.
 * BT Ev,Ib - test only, so the read-only body is used. */
FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
{
    IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
}
11089
11090
/** Opcode 0x0f 0xba /5.
 * BTS Ev,Ib - the RW and LOCKED macros below expand to a single combined
 * body (the first ends in an open 'else' the second closes). */
FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
{
    IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
}
11098
11099
/** Opcode 0x0f 0xba /6.
 * BTR Ev,Ib - the RW and LOCKED macros below form one combined body. */
FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
{
    IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
}
11107
11108
/** Opcode 0x0f 0xba /7.
 * BTC Ev,Ib - the RW and LOCKED macros below form one combined body. */
FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
{
    IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
    IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
    IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
}
11116
11117
/** Opcode 0x0f 0xba.
 * Group 8: dispatches BT/BTS/BTR/BTC with an imm8 bit offset on the
 * modr/m reg field; /0../3 are invalid encodings. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);

        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11137
11138
/** Opcode 0x0f 0xbb.
 * BTC Ev,Gv - the two macros expand to a single combined body covering the
 * unlocked and LOCK-prefixed paths respectively. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
    IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
}
11147
11148
11149/**
11150 * Common worker for BSF and BSR instructions.
11151 *
11152 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11153 * the destination register, which means that for 32-bit operations the high
11154 * bits must be left alone.
11155 *
11156 * @param pImpl Pointer to the instruction implementation (assembly).
11157 */
11158FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
11159{
11160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11161
11162 /*
11163 * If rm is denoting a register, no more instruction bytes.
11164 */
11165 if (IEM_IS_MODRM_REG_MODE(bRm))
11166 {
11167 switch (pVCpu->iem.s.enmEffOpSize)
11168 {
11169 case IEMMODE_16BIT:
11170 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
11171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11172 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11173 IEM_MC_ARG(uint16_t, u16Src, 1);
11174 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11175
11176 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11177 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11178 IEM_MC_REF_EFLAGS(pEFlags);
11179 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11180
11181 IEM_MC_ADVANCE_RIP_AND_FINISH();
11182 IEM_MC_END();
11183 break;
11184
11185 case IEMMODE_32BIT:
11186 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
11187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11188 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11189 IEM_MC_ARG(uint32_t, u32Src, 1);
11190 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11191
11192 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11193 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11194 IEM_MC_REF_EFLAGS(pEFlags);
11195 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11196 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11197 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11198 } IEM_MC_ENDIF();
11199 IEM_MC_ADVANCE_RIP_AND_FINISH();
11200 IEM_MC_END();
11201 break;
11202
11203 case IEMMODE_64BIT:
11204 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
11205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11206 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11207 IEM_MC_ARG(uint64_t, u64Src, 1);
11208 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11209
11210 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11211 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11212 IEM_MC_REF_EFLAGS(pEFlags);
11213 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11214
11215 IEM_MC_ADVANCE_RIP_AND_FINISH();
11216 IEM_MC_END();
11217 break;
11218
11219 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11220 }
11221 }
11222 else
11223 {
11224 /*
11225 * We're accessing memory.
11226 */
11227 switch (pVCpu->iem.s.enmEffOpSize)
11228 {
11229 case IEMMODE_16BIT:
11230 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
11231 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11232 IEM_MC_ARG(uint16_t, u16Src, 1);
11233 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11235
11236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11238 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11239 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11240 IEM_MC_REF_EFLAGS(pEFlags);
11241 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11242
11243 IEM_MC_ADVANCE_RIP_AND_FINISH();
11244 IEM_MC_END();
11245 break;
11246
11247 case IEMMODE_32BIT:
11248 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
11249 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11250 IEM_MC_ARG(uint32_t, u32Src, 1);
11251 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11253
11254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11256 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11257 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11258 IEM_MC_REF_EFLAGS(pEFlags);
11259 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11260
11261 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11262 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11263 } IEM_MC_ENDIF();
11264 IEM_MC_ADVANCE_RIP_AND_FINISH();
11265 IEM_MC_END();
11266 break;
11267
11268 case IEMMODE_64BIT:
11269 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
11270 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11271 IEM_MC_ARG(uint64_t, u64Src, 1);
11272 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11274
11275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11277 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11278 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11279 IEM_MC_REF_EFLAGS(pEFlags);
11280 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11281
11282 IEM_MC_ADVANCE_RIP_AND_FINISH();
11283 IEM_MC_END();
11284 break;
11285
11286 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11287 }
11288 }
11289}
11290
11291
/** Opcode 0x0f 0xbc.
 * BSF Gv,Ev - forward bit scan; shares its worker with BSR. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
}
11300
11301
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev
 * Trailing-zero count.  Without guest BMI1 the encoding decodes as BSF
 * (the F3 prefix is simply ignored). */
FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
{
    /* No BMI1 on the guest: behave as the legacy BSF instruction. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
    IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Generic, AMD-style and Intel-style EFLAGS behaviour variants. */
    static const IEMOPBINSIZES s_iemAImpl_tzcnt =
    { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
    { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
    { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
    /* Indexed by [host-has-BMI1][target CPU EFLAGS behaviour]. */
    static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
        { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
                                                                            IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
}
11327
11328
/** Opcode 0x0f 0xbd.
 * BSR Gv,Ev - reverse bit scan; shares its worker with BSF. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
}
11337
11338
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev
 * Leading-zero count.  Without guest BMI1 the encoding decodes as BSR
 * (the F3 prefix is simply ignored). */
FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
{
    /* No BMI1 on the guest: behave as the legacy BSR instruction. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
    IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Generic, AMD-style and Intel-style EFLAGS behaviour variants. */
    static const IEMOPBINSIZES s_iemAImpl_lzcnt =
    { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
    { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
    { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
    /* Indexed by [host-has-BMI1][target CPU EFLAGS behaviour]. */
    static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
        { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
                                                                            IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
    IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
}
11364
11365
11366
/** Opcode 0x0f 0xbe.
 * MOVSX Gv,Eb - load Gv with the sign-extended 8-bit register/memory operand. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11462
11463
/**
 * @opcode      0x0f 0xbf
 * movsx Gv,Ew - sign-extend a 16-bit register/memory operand into a
 * 32-bit or 64-bit general-purpose register (386+).
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Only two result widths: 16/32-bit operand size both store a 32-bit
           result (see the @todo above), 64-bit stores the full extension. */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        /* Effective address is calculated before the decode is finalized so
           that prefix/segment state is still available to the calculation. */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
11532
11533
/**
 * @opcode      0x0f 0xc0
 * xadd Eb,Gb - exchange-and-add byte (486+): Gb receives the old value of
 * Eb while Eb receives the sum; EFLAGS are updated as for ADD.
 */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: the worker updates both registers in place. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The source register is worked on via a local copy (u8RegCopy) so the
           guest register is only written after the memory commit succeeds. */
        IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic worker variant. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11591
11592
/**
 * @opcode      0x0f 0xc1
 * xadd Ev,Gv - exchange-and-add word/dword/qword (486+): Gv receives the
 * old value of Ev while Ev receives the sum; EFLAGS as for ADD.
 */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit GPR writes zero the upper halves in 64-bit mode; both
                   destination and source register were written to by reference. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* As with the byte form: work on a local copy of the source register and
           only store it back after the memory operand has been committed; the
           LOCK prefix selects the atomic worker variant. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* Storing via IEM_MC_STORE_GREG_U32 takes care of zeroing the
                   upper register half in 64-bit mode. */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11752
11753
/**
 * @opcode      0x0f 0xc2
 * cmpps Vps,Wps,Ib - packed single-precision compare (SSE); the imm8 selects
 * the comparison predicate.  Result is only stored if no SIMD FP exception
 * is pending in MXCSR afterwards.
 */
FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC,            Src);
        IEM_MC_LOCAL(X86XMMREG,                   Dst);
        IEM_MC_ARG(uint32_t *,                    pfMxcsr,            0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG,          pDst,       Dst,    1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC,  pSrc,       Src,    2);
        IEM_MC_ARG_CONST(uint8_t,                 bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        /* Note: imm8 is fetched after the effective address calculation since
           it follows the ModR/M displacement bytes in the instruction stream. */
        IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC,            Src);
        IEM_MC_LOCAL(X86XMMREG,                   Dst);
        IEM_MC_ARG(uint32_t *,                    pfMxcsr,            0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG,          pDst,       Dst,    1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC,  pSrc,       Src,    2);
        IEM_MC_LOCAL(RTGCPTR,                     GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t,                 bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11823
11824
/**
 * @opcode      0x66 0x0f 0xc2
 * cmppd Vpd,Wpd,Ib - packed double-precision compare (SSE2); imm8 selects
 * the comparison predicate.  Result is only stored if no SIMD FP exception
 * is pending in MXCSR afterwards.
 */
FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC,            Src);
        IEM_MC_LOCAL(X86XMMREG,                   Dst);
        IEM_MC_ARG(uint32_t *,                    pfMxcsr,            0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG,          pDst,       Dst,    1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC,  pSrc,       Src,    2);
        IEM_MC_ARG_CONST(uint8_t,                 bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        /* imm8 follows the ModR/M displacement, hence the fetch after the
           effective address calculation. */
        IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC,            Src);
        IEM_MC_LOCAL(X86XMMREG,                   Dst);
        IEM_MC_ARG(uint32_t *,                    pfMxcsr,            0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG,          pDst,       Dst,    1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC,  pSrc,       Src,    2);
        IEM_MC_LOCAL(RTGCPTR,                     GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t,                 bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11894
11895
11896/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11897FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11898{
11899 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11900
11901 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11902 if (IEM_IS_MODRM_REG_MODE(bRm))
11903 {
11904 /*
11905 * XMM32, XMM32.
11906 */
11907 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11908 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11910 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11911 IEM_MC_LOCAL(X86XMMREG, Dst);
11912 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11913 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11914 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11915 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11916 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11917 IEM_MC_PREPARE_SSE_USAGE();
11918 IEM_MC_REF_MXCSR(pfMxcsr);
11919 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11920 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11921 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11922 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11923 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11924 } IEM_MC_ELSE() {
11925 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11926 } IEM_MC_ENDIF();
11927
11928 IEM_MC_ADVANCE_RIP_AND_FINISH();
11929 IEM_MC_END();
11930 }
11931 else
11932 {
11933 /*
11934 * XMM32, [mem32].
11935 */
11936 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
11937 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11938 IEM_MC_LOCAL(X86XMMREG, Dst);
11939 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11940 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11941 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11943
11944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11945 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11946 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11948 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11949 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11950
11951 IEM_MC_PREPARE_SSE_USAGE();
11952 IEM_MC_REF_MXCSR(pfMxcsr);
11953 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11954 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11955 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11956 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11957 } IEM_MC_ELSE() {
11958 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11959 } IEM_MC_ENDIF();
11960
11961 IEM_MC_ADVANCE_RIP_AND_FINISH();
11962 IEM_MC_END();
11963 }
11964}
11965
11966
/**
 * @opcode      0xf2 0x0f 0xc2
 * cmpsd Vsd,Wsd,Ib - scalar double-precision compare (SSE2); imm8 selects
 * the comparison predicate.  Only the low qword of the destination is
 * written, and only if no SIMD FP exception is pending in MXCSR afterwards.
 */
FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
{
    IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC,            Src);
        IEM_MC_LOCAL(X86XMMREG,                   Dst);
        IEM_MC_ARG(uint32_t *,                    pfMxcsr,            0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG,          pDst,       Dst,    1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC,  pSrc,       Src,    2);
        IEM_MC_ARG_CONST(uint8_t,                 bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM64, [mem64].
         */
        /* imm8 follows the ModR/M displacement, hence the fetch after the
           effective address calculation.  The 64-bit memory operand has no
           alignment restriction. */
        IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC,            Src);
        IEM_MC_LOCAL(X86XMMREG,                   Dst);
        IEM_MC_ARG(uint32_t *,                    pfMxcsr,            0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG,          pDst,       Dst,    1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC,  pSrc,       Src,    2);
        IEM_MC_LOCAL(RTGCPTR,                     GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t,                 bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12036
12037
/**
 * @opcode      0x0f 0xc3
 * movnti My,Gy - non-temporal store of a 32/64-bit GPR to memory (SSE2).
 * Only the register -> memory form is valid; the register form and 16-bit
 * operand size raise \#UD.  The non-temporal hint itself is not modelled.
 */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                IEMOP_RAISE_INVALID_OPCODE_RET();

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12088
12089
12090/* Opcode 0x66 0x0f 0xc3 - invalid */
12091/* Opcode 0xf3 0x0f 0xc3 - invalid */
12092/* Opcode 0xf2 0x0f 0xc3 - invalid */
12093
12094
/**
 * @opcode      0x0f 0xc4
 * pinsrw Pq,Ry/Mw,Ib - insert a word into an MMX register at the word index
 * given by imm8.  Available with SSE or the AMD MMX extensions.
 */
FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
        IEM_MC_ARG(uint16_t,    u16Src,  1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        /* MMX instructions switch the FPU into MMX mode (tag words, TOS=0). */
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        /* imm8 follows the ModR/M displacement, hence the fetch after the
           effective address calculation. */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
        IEM_MC_ARG(uint16_t,    u16Src,  1);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12147
12148
/**
 * @opcode      0x66 0x0f 0xc4
 * pinsrw Vdq,Ry/Mw,Ib - insert a word into an XMM register at the word index
 * given by imm8 (SSE2).
 */
FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst,  0);
        IEM_MC_ARG(uint16_t,    u16Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        /* imm8 follows the ModR/M displacement, hence the fetch after the
           effective address calculation. */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst,  0);
        IEM_MC_ARG(uint16_t,    u16Src, 1);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12197
12198
12199/* Opcode 0xf3 0x0f 0xc4 - invalid */
12200/* Opcode 0xf2 0x0f 0xc4 - invalid */
12201
12202
/**
 * @opcode      0x0f 0xc5
 * pextrw Gd,Nq,Ib - extract the imm8-selected word from an MMX register and
 * zero-extend it into a 32-bit GPR.  Register form only; a memory operand
 * raises \#UD.  Available with SSE or the AMD MMX extensions.
 */
FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
{
    /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, MMX, imm8.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_LOCAL(uint16_t, u16Dst);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
        IEM_MC_ARG(uint64_t,             u64Src, 1);
        IEM_MC_ARG_CONST(uint8_t,        bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
        /* The 32-bit store zero extends the 16-bit result into the GPR. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12233
12234
/**
 * @opcode      0x66 0x0f 0xc5
 * pextrw Gd,Udq,Ib - extract the imm8-selected word from an XMM register and
 * zero-extend it into a 32-bit GPR (SSE2).  Register form only; a memory
 * operand raises \#UD.
 */
FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
{
    IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Greg32, XMM, imm8.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint16_t, u16Dst);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
        IEM_MC_ARG(PCRTUINT128U,         puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,        bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
        /* The 32-bit store zero extends the 16-bit result into the GPR. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12264
12265
12266/* Opcode 0xf3 0x0f 0xc5 - invalid */
12267/* Opcode 0xf2 0x0f 0xc5 - invalid */
12268
12269
/**
 * @opcode      0x0f 0xc6
 * shufps Vps,Wps,Ib - shuffle packed single-precision values (SSE); imm8
 * selects which dwords of destination/source end up in the result.
 */
FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U,  pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        /* imm8 follows the ModR/M displacement, hence the fetch after the
           effective address calculation.  The 128-bit memory operand must be
           16-byte aligned (ALIGN_SSE fetch raises #GP otherwise). */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U,  pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12320
12321
/**
 * @opcode      0x66 0x0f 0xc6
 * shufpd Vpd,Wpd,Ib - shuffle packed double-precision values (SSE2); imm8
 * selects which qwords of destination/source end up in the result.
 */
FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U,  pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        /* imm8 follows the ModR/M displacement, hence the fetch after the
           effective address calculation.  The 128-bit memory operand must be
           16-byte aligned (ALIGN_SSE fetch raises #GP otherwise). */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U,  pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12372
12373
12374/* Opcode 0xf3 0x0f 0xc6 - invalid */
12375/* Opcode 0xf2 0x0f 0xc6 - invalid */
12376
12377
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B: compare EDX:EAX with m64; if equal store ECX:EBX, else load m64 into EDX:EAX. */
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b);
    IEM_MC_MEM_MAP_U64_RW(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    /* Gather the EDX:EAX comparand pair into a single 64-bit local. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Gather the ECX:EBX replacement pair likewise. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    /* Honour LOCK unless the VM config asks us to disregard it. */
    if (  !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
        && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64MemDst, bUnmapInfo);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* ZF clear means the compare failed; write the memory value back to EDX:EAX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12422
12423
/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    /* CMPXCHG16B: 16-byte variant of CMPXCHG8B using RDX:RAX / RCX:RBX; #UD without CPUID.CX16. */
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
    {
        IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        /* The memory operand must be 16-byte aligned, alignment check raises #GP(0). */
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* Gather the RDX:RAX comparand pair. */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        /* Gather the RCX:RBX replacement pair. */
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);

#ifdef RT_ARCH_AMD64 /* some code duplication here because IEMAllInstPython.py cannot parse if/else/#if spaghetti. */
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
        {
            if (  !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
                && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
        { /* (see comments in #else case below) */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
                                    iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }

#elif defined(RT_ARCH_ARM64)
        /** @todo may require fallback for unaligned accesses... */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);

#else
        /* Note! The fallback for 32-bit hosts and hosts without CX16 uses multiple
           accesses that are not all atomic, which works fine in a uni-CPU guest
           configuration (ignoring DMA). If guest SMP is active we have no choice
           but to use a rendezvous callback here. Sigh. */
        if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        else
        {
            IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
        }
#endif

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* ZF clear: comparison failed, propagate the memory value to RDX:RAX. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();

        IEM_MC_END();
    }
    Log(("cmpxchg16b -> #UD\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
12507
12508FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12509{
12510 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12511 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12512 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12513}
12514
12515
/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
{
    /* RDRAND: store a hardware random number in the ModR/M r/m register; #UD without CPUID.RDRAND. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination; the C implementation handles the operand size. */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 1);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_rdrand, iReg, enmEffOpSize);
        IEM_MC_END();
    }
    /* Register only - the memory encoded form is invalid. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12535
12536/** Opcode 0x0f 0xc7 !11/6. */
12537#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    /* VMPTRLD: load the current-VMCS pointer from the 64-bit memory operand (nested VMX). */
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
12552#else
12553FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12554#endif
12555
12556/** Opcode 0x66 0x0f 0xc7 !11/6. */
12557#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    /* VMCLEAR: clear the VMCS referenced by the 64-bit memory operand (nested VMX). */
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
12572#else
12573FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12574#endif
12575
12576/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12577#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    /* VMXON: enter VMX operation using the VMXON region pointed to by the memory operand. */
    IEMOP_MNEMONIC(vmxon, "vmxon");
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
    IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
12591#else
12592FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12593#endif
12594
12595/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12596#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    /* VMPTRST: store the current-VMCS pointer to the 64-bit memory operand (nested VMX). */
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
12611#else
12612FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12613#endif
12614
/** Opcode 0x0f 0xc7 11/7. */
FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
{
    /* RDSEED: store a hardware seed value in the ModR/M r/m register; #UD without CPUID.RDSEED. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
        IEMOP_RAISE_INVALID_OPCODE_RET();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 1);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_rdseed, iReg, enmEffOpSize);
        IEM_MC_END();
    }
    /* Register only - the memory encoded form is invalid. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
12635
12636/**
12637 * Group 9 jump table for register variant.
12638 */
12639IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12640{ /* pfx: none, 066h, 0f3h, 0f2h */
12641 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12642 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12643 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12644 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12645 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12646 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12647 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12648 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12649};
12650AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12651
12652
12653/**
12654 * Group 9 jump table for memory variant.
12655 */
12656IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12657{ /* pfx: none, 066h, 0f3h, 0f2h */
12658 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12659 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12660 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12661 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12662 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12663 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12664 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12665 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12666};
12667AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12668
12669
12670/** Opcode 0x0f 0xc7. */
12671FNIEMOP_DEF(iemOp_Grp9)
12672{
12673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12674 if (IEM_IS_MODRM_REG_MODE(bRm))
12675 /* register, register */
12676 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12677 + pVCpu->iem.s.idxPrefix], bRm);
12678 /* memory, register */
12679 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12680 + pVCpu->iem.s.idxPrefix], bRm);
12681}
12682
12683
12684/**
12685 * Common 'bswap register' helper.
12686 */
12687FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12688{
12689 switch (pVCpu->iem.s.enmEffOpSize)
12690 {
12691 case IEMMODE_16BIT:
12692 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486, 0);
12693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12694 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12695 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12696 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12697 IEM_MC_ADVANCE_RIP_AND_FINISH();
12698 IEM_MC_END();
12699 break;
12700
12701 case IEMMODE_32BIT:
12702 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486, 0);
12703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12704 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12705 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12706 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12707 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12708 IEM_MC_ADVANCE_RIP_AND_FINISH();
12709 IEM_MC_END();
12710 break;
12711
12712 case IEMMODE_64BIT:
12713 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
12714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12715 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12716 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12717 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12718 IEM_MC_ADVANCE_RIP_AND_FINISH();
12719 IEM_MC_END();
12720 break;
12721
12722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12723 }
12724}
12725
12726
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
12737
12738
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* Register is selected by the low opcode bits plus REX.B (see iemOp_bswap_rAX_r8). */
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
12746
12747
12748/** Opcode 0x0f 0xca. */
12749FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12750{
12751 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r10");
12752 IEMOP_HLP_MIN_486();
12753 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12754}
12755
12756
12757/** Opcode 0x0f 0xcb. */
12758FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12759{
12760 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r11");
12761 IEMOP_HLP_MIN_486();
12762 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12763}
12764
12765
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* Register is selected by the low opcode bits plus REX.B (see iemOp_bswap_rAX_r8). */
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
12773
12774
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* Register is selected by the low opcode bits plus REX.B (see iemOp_bswap_rAX_r8). */
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
12782
12783
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* Register is selected by the low opcode bits plus REX.B (see iemOp_bswap_rAX_r8). */
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
12791
12792
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* Register is selected by the low opcode bits plus REX.B (see iemOp_bswap_rAX_r8). */
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
12800
12801
12802/* Opcode 0x0f 0xd0 - invalid */
12803
12804
/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Decode/operand handling done by the common SSE3 FP worker; only the AIMPL differs. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
}
12811
12812
12813/* Opcode 0xf3 0x0f 0xd0 - invalid */
12814
12815
/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Decode/operand handling done by the common SSE3 FP worker; only the AIMPL differs. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
}
12822
12823
12824
/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* Shared MMX worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
}
12831
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Shared SSE2 worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
}
12838
12839/* Opcode 0xf3 0x0f 0xd1 - invalid */
12840/* Opcode 0xf2 0x0f 0xd1 - invalid */
12841
/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* Shared MMX worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
}
12848
12849
/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Shared SSE2 worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
}
12856
12857
12858/* Opcode 0xf3 0x0f 0xd2 - invalid */
12859/* Opcode 0xf2 0x0f 0xd2 - invalid */
12860
12861/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12862FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12863{
12864 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12865 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12866}
12867
12868
/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Shared SSE2 worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
}
12875
12876
12877/* Opcode 0xf3 0x0f 0xd3 - invalid */
12878/* Opcode 0xf2 0x0f 0xd3 - invalid */
12879
12880
/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the _Sse2 worker variant: the MMX form of PADDQ requires SSE2 support. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
}
12887
12888
/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Shared SSE2 worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
}
12895
12896
12897/* Opcode 0xf3 0x0f 0xd4 - invalid */
12898/* Opcode 0xf2 0x0f 0xd4 - invalid */
12899
/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Shared MMX worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
}
12906
/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Shared SSE2 worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
}
12913
12914
12915/* Opcode 0xf3 0x0f 0xd5 - invalid */
12916/* Opcode 0xf2 0x0f 0xd5 - invalid */
12917
12918/* Opcode 0x0f 0xd6 - invalid */
12919
12920/**
12921 * @opcode 0xd6
12922 * @oppfx 0x66
12923 * @opcpuid sse2
12924 * @opgroup og_sse2_pcksclr_datamove
12925 * @opxcpttype none
12926 * @optest op1=-1 op2=2 -> op1=2
12927 * @optest op1=0 op2=-42 -> op1=-42
12928 */
12929FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12930{
12931 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12932 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12933 if (IEM_IS_MODRM_REG_MODE(bRm))
12934 {
12935 /*
12936 * Register, register.
12937 */
12938 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12940 IEM_MC_LOCAL(uint64_t, uSrc);
12941
12942 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12943 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12944
12945 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12946 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12947
12948 IEM_MC_ADVANCE_RIP_AND_FINISH();
12949 IEM_MC_END();
12950 }
12951 else
12952 {
12953 /*
12954 * Memory, register.
12955 */
12956 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12957 IEM_MC_LOCAL(uint64_t, uSrc);
12958 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12959
12960 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12962 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12963 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12964
12965 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12966 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12967
12968 IEM_MC_ADVANCE_RIP_AND_FINISH();
12969 IEM_MC_END();
12970 }
12971}
12972
12973
12974/**
12975 * @opcode 0xd6
12976 * @opcodesub 11 mr/reg
12977 * @oppfx f3
12978 * @opcpuid sse2
12979 * @opgroup og_sse2_simdint_datamove
12980 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
12981 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
12982 */
12983FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
12984{
12985 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12986 if (IEM_IS_MODRM_REG_MODE(bRm))
12987 {
12988 /*
12989 * Register, register.
12990 */
12991 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12992 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12994 IEM_MC_LOCAL(uint64_t, uSrc);
12995
12996 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12997 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
12998 IEM_MC_FPU_TO_MMX_MODE();
12999
13000 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13001 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13002
13003 IEM_MC_ADVANCE_RIP_AND_FINISH();
13004 IEM_MC_END();
13005 }
13006
13007 /**
13008 * @opdone
13009 * @opmnemonic udf30fd6mem
13010 * @opcode 0xd6
13011 * @opcodesub !11 mr/reg
13012 * @oppfx f3
13013 * @opunused intel-modrm
13014 * @opcpuid sse
13015 * @optest ->
13016 */
13017 else
13018 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13019}
13020
13021
13022/**
13023 * @opcode 0xd6
13024 * @opcodesub 11 mr/reg
13025 * @oppfx f2
13026 * @opcpuid sse2
13027 * @opgroup og_sse2_simdint_datamove
13028 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13029 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13030 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13031 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13032 * @optest op1=-42 op2=0xfedcba9876543210
13033 * -> op1=0xfedcba9876543210 ftw=0xff
13034 */
13035FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13036{
13037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13038 if (IEM_IS_MODRM_REG_MODE(bRm))
13039 {
13040 /*
13041 * Register, register.
13042 */
13043 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13044 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
13045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13046 IEM_MC_LOCAL(uint64_t, uSrc);
13047
13048 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13049 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13050 IEM_MC_FPU_TO_MMX_MODE();
13051
13052 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13053 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13054
13055 IEM_MC_ADVANCE_RIP_AND_FINISH();
13056 IEM_MC_END();
13057 }
13058
13059 /**
13060 * @opdone
13061 * @opmnemonic udf20fd6mem
13062 * @opcode 0xd6
13063 * @opcodesub !11 mr/reg
13064 * @oppfx f2
13065 * @opunused intel-modrm
13066 * @opcpuid sse
13067 * @optest ->
13068 */
13069 else
13070 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13071}
13072
13073
/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    /* PMOVMSKB (MMX form): collect the sign bits of the 8 source bytes into a GPR. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
13101
13102
/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    /* PMOVMSKB (SSE2 form): collect the sign bits of the 16 source bytes into a GPR. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
13127
13128
13129/* Opcode 0xf3 0x0f 0xd7 - invalid */
13130/* Opcode 0xf2 0x0f 0xd7 - invalid */
13131
13132
/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Shared MMX worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
}
13139
13140
/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Shared SSE2 worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
}
13147
13148
13149/* Opcode 0xf3 0x0f 0xd8 - invalid */
13150/* Opcode 0xf2 0x0f 0xd8 - invalid */
13151
/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Shared MMX worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
}
13158
13159
/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Shared SSE2 worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
}
13166
13167
13168/* Opcode 0xf3 0x0f 0xd9 - invalid */
13169/* Opcode 0xf2 0x0f 0xd9 - invalid */
13170
/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the MmxSse worker: the MMX form of PMINUB is an SSE addition. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
}
13177
13178
/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Shared SSE2 worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
}
13185
13186/* Opcode 0xf3 0x0f 0xda - invalid */
13187/* Opcode 0xf2 0x0f 0xda - invalid */
13188
/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Shared MMX worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
}
13195
13196
/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
FNIEMOP_DEF(iemOp_pand_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Shared SSE2 worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}
13203
13204
13205/* Opcode 0xf3 0x0f 0xdb - invalid */
13206/* Opcode 0xf2 0x0f 0xdb - invalid */
13207
/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Shared MMX worker handles decoding; only the AIMPL helper differs. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
}
13214
13215
13216/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13217FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13218{
13219 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13220 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
13221}
13222
13223
13224/* Opcode 0xf3 0x0f 0xdc - invalid */
13225/* Opcode 0xf2 0x0f 0xdc - invalid */
13226
/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
}


/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
}
13241
13242
13243/* Opcode 0xf3 0x0f 0xdd - invalid */
13244/* Opcode 0xf2 0x0f 0xdd - invalid */
13245
/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the combined MMX/SSE worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
}


/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
}
13260
13261/* Opcode 0xf3 0x0f 0xde - invalid */
13262/* Opcode 0xf2 0x0f 0xde - invalid */
13263
13264
/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
}


/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}
13279
13280
13281/* Opcode 0xf3 0x0f 0xdf - invalid */
13282/* Opcode 0xf2 0x0f 0xdf - invalid */
13283
/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the optimized ("Opt") MMX/SSE worker variant for decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
}


/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; optimized SSE2 worker variant. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
}
13298
13299
13300/* Opcode 0xf3 0x0f 0xe0 - invalid */
13301/* Opcode 0xf2 0x0f 0xe0 - invalid */
13302
/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the optimized ("Opt") MMX worker variant for decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
}


/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; optimized SSE2 worker variant. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
}
13317
13318
13319/* Opcode 0xf3 0x0f 0xe1 - invalid */
13320/* Opcode 0xf2 0x0f 0xe1 - invalid */
13321
/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the optimized ("Opt") MMX worker variant for decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
}


/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; optimized SSE2 worker variant. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
}
13336
13337
13338/* Opcode 0xf3 0x0f 0xe2 - invalid */
13339/* Opcode 0xf2 0x0f 0xe2 - invalid */
13340
/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the optimized ("Opt") MMX/SSE worker variant for decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
}


/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; optimized SSE2 worker variant. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
}
13355
13356
13357/* Opcode 0xf3 0x0f 0xe3 - invalid */
13358/* Opcode 0xf2 0x0f 0xe3 - invalid */
13359
/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the optimized ("Opt") MMX/SSE worker variant for decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
}


/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; optimized SSE2 worker variant. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
}
13374
13375
13376/* Opcode 0xf3 0x0f 0xe4 - invalid */
13377/* Opcode 0xf2 0x0f 0xe4 - invalid */
13378
/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
}


/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
}
13393
13394
13395/* Opcode 0xf3 0x0f 0xe5 - invalid */
13396/* Opcode 0xf2 0x0f 0xe5 - invalid */
13397/* Opcode 0x0f 0xe6 - invalid */
13398
13399
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Packed double -> dword conversion with truncation; common SSE2 FP worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
}


/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
{
    /* NOTE(review): the source of cvtdq2pd is packed dwords, yet the operand tag
       here is Wpd (matching the function name) — verify against the op-table
       generator's expectations before changing. */
    IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
}


/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* Packed double -> dword conversion (current rounding mode); common SSE2 FP worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
}
13422
13423
13424/**
13425 * @opcode 0xe7
13426 * @opcodesub !11 mr/reg
13427 * @oppfx none
13428 * @opcpuid sse
13429 * @opgroup og_sse1_cachect
13430 * @opxcpttype none
13431 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13432 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13433 */
13434FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13435{
13436 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13438 if (IEM_IS_MODRM_MEM_MODE(bRm))
13439 {
13440 /* Register, memory. */
13441 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13442 IEM_MC_LOCAL(uint64_t, uSrc);
13443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13444
13445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13447 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13448 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13449 IEM_MC_FPU_TO_MMX_MODE();
13450
13451 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13452 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13453
13454 IEM_MC_ADVANCE_RIP_AND_FINISH();
13455 IEM_MC_END();
13456 }
13457 /**
13458 * @opdone
13459 * @opmnemonic ud0fe7reg
13460 * @opcode 0xe7
13461 * @opcodesub 11 mr/reg
13462 * @oppfx none
13463 * @opunused immediate
13464 * @opcpuid sse
13465 * @optest ->
13466 */
13467 else
13468 IEMOP_RAISE_INVALID_OPCODE_RET();
13469}
13470
13471/**
13472 * @opcode 0xe7
13473 * @opcodesub !11 mr/reg
13474 * @oppfx 0x66
13475 * @opcpuid sse2
13476 * @opgroup og_sse2_cachect
13477 * @opxcpttype 1
13478 * @optest op1=-1 op2=2 -> op1=2
13479 * @optest op1=0 op2=-42 -> op1=-42
13480 */
13481FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13482{
13483 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13484 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13485 if (IEM_IS_MODRM_MEM_MODE(bRm))
13486 {
13487 /* Register, memory. */
13488 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13489 IEM_MC_LOCAL(RTUINT128U, uSrc);
13490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13491
13492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13494 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13495 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13496
13497 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13498 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13499
13500 IEM_MC_ADVANCE_RIP_AND_FINISH();
13501 IEM_MC_END();
13502 }
13503
13504 /**
13505 * @opdone
13506 * @opmnemonic ud660fe7reg
13507 * @opcode 0xe7
13508 * @opcodesub 11 mr/reg
13509 * @oppfx 0x66
13510 * @opunused immediate
13511 * @opcpuid sse
13512 * @optest ->
13513 */
13514 else
13515 IEMOP_RAISE_INVALID_OPCODE_RET();
13516}
13517
13518/* Opcode 0xf3 0x0f 0xe7 - invalid */
13519/* Opcode 0xf2 0x0f 0xe7 - invalid */
13520
13521
/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
}


/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
}
13536
13537
13538/* Opcode 0xf3 0x0f 0xe8 - invalid */
13539/* Opcode 0xf2 0x0f 0xe8 - invalid */
13540
/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
}
13555
13556
13557/* Opcode 0xf3 0x0f 0xe9 - invalid */
13558/* Opcode 0xf2 0x0f 0xe9 - invalid */
13559
13560
/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the combined MMX/SSE worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
}
13575
13576
13577/* Opcode 0xf3 0x0f 0xea - invalid */
13578/* Opcode 0xf2 0x0f 0xea - invalid */
13579
13580
/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}
13595
13596
13597/* Opcode 0xf3 0x0f 0xeb - invalid */
13598/* Opcode 0xf2 0x0f 0xeb - invalid */
13599
/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
}
13614
13615
13616/* Opcode 0xf3 0x0f 0xec - invalid */
13617/* Opcode 0xf2 0x0f 0xec - invalid */
13618
/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
}
13633
13634
13635/* Opcode 0xf3 0x0f 0xed - invalid */
13636/* Opcode 0xf2 0x0f 0xed - invalid */
13637
13638
/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the combined MMX/SSE worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}
13653
13654
13655/* Opcode 0xf3 0x0f 0xee - invalid */
13656/* Opcode 0xf2 0x0f 0xee - invalid */
13657
13658
/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}
13673
13674
13675/* Opcode 0xf3 0x0f 0xef - invalid */
13676/* Opcode 0xf2 0x0f 0xef - invalid */
13677
13678/* Opcode 0x0f 0xf0 - invalid */
13679/* Opcode 0x66 0x0f 0xf0 - invalid */
13680
13681
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.  LDDQU never requires alignment, hence the plain
         * (unaligned) 128-bit fetch below; requires SSE3 (fSse3 check).
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
13714
13715
/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* Uses the optimized ("Opt") MMX worker variant for decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* 128-bit form; optimized SSE2 worker variant. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}
13730
13731
13732/* Opcode 0xf2 0x0f 0xf1 - invalid */
13733
/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* Uses the optimized ("Opt") MMX worker variant for decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* 128-bit form; optimized SSE2 worker variant. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}
13748
13749
13750/* Opcode 0xf2 0x0f 0xf2 - invalid */
13751
/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* Uses the optimized ("Opt") MMX worker variant for decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* 128-bit form; optimized SSE2 worker variant. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}
13766
13767/* Opcode 0xf2 0x0f 0xf3 - invalid */
13768
13769/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13770FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13771{
13772 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13773 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
13774}
13775
13776
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
}
13783
13784
13785/* Opcode 0xf2 0x0f 0xf4 - invalid */
13786
/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}
13801
13802/* Opcode 0xf2 0x0f 0xf5 - invalid */
13803
/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the optimized ("Opt") MMX/SSE worker variant for decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; optimized SSE2 worker variant. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}
13818
13819
13820/* Opcode 0xf2 0x0f 0xf6 - invalid */
13821
/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);     /* Not implemented yet - FNIEMOP_STUB emits a placeholder body. */
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq); /* Not implemented yet - FNIEMOP_STUB emits a placeholder body. */
13826/* Opcode 0xf2 0x0f 0xf7 - invalid */
13827
13828
/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}
13843
13844
13845/* Opcode 0xf2 0x0f 0xf8 - invalid */
13846
13847
/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}
13862
13863
13864/* Opcode 0xf2 0x0f 0xf9 - invalid */
13865
13866
/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}
13881
13882
13883/* Opcode 0xf2 0x0f 0xfa - invalid */
13884
13885
/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* PSUBQ on MMX registers is an SSE2 addition, hence the _Sse2 worker variant. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}
13900
13901
13902/* Opcode 0xf2 0x0f 0xfb - invalid */
13903
13904
/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}
13919
13920
13921/* Opcode 0xf2 0x0f 0xfc - invalid */
13922
13923
/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}
13938
13939
13940/* Opcode 0xf2 0x0f 0xfd - invalid */
13941
13942
/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and reg/mem operand handling is done by the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 128-bit form; the common SSE2 worker does the decoding and dispatch. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}
13957
13958
13959/* Opcode 0xf2 0x0f 0xfe - invalid */
13960
13961
/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    /* On Intel CPUs UD0 consumes a ModRM byte and any effective-address bytes
       before raising \#UD; replicate that only for the Intel vendor setting. */
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);   /* skip disp/SIB so the faulting length is right */
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
13975
13976
13977
13978/**
13979 * Two byte opcode map, first byte 0x0f.
13980 *
13981 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
13982 * check if it needs updating as well when making changes.
13983 */
13984const PFNIEMOP g_apfnTwoByteMap[] =
13985{
13986 /* no prefix, 066h prefix f3h prefix, f2h prefix */
13987 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
13988 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
13989 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
13990 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
13991 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
13992 /* 0x05 */ IEMOP_X4(iemOp_syscall),
13993 /* 0x06 */ IEMOP_X4(iemOp_clts),
13994 /* 0x07 */ IEMOP_X4(iemOp_sysret),
13995 /* 0x08 */ IEMOP_X4(iemOp_invd),
13996 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
13997 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
13998 /* 0x0b */ IEMOP_X4(iemOp_ud2),
13999 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
14000 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
14001 /* 0x0e */ IEMOP_X4(iemOp_femms),
14002 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
14003
14004 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
14005 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
14006 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
14007 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14008 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14009 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14010 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
14011 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14012 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
14013 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
14014 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
14015 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
14016 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
14017 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
14018 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
14019 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
14020
14021 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
14022 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
14023 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
14024 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
14025 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
14026 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14027 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
14028 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14029 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14030 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14031 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
14032 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14033 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
14034 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
14035 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14036 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14037
14038 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
14039 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
14040 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
14041 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
14042 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
14043 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
14044 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
14045 /* 0x37 */ IEMOP_X4(iemOp_getsec),
14046 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
14047 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14048 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
14049 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14050 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14051 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14052 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14053 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14054
14055 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
14056 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
14057 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
14058 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
14059 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
14060 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
14061 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
14062 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
14063 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
14064 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
14065 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
14066 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
14067 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
14068 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
14069 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
14070 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
14071
14072 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14073 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
14074 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
14075 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
14076 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14077 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14078 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14079 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14080 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
14081 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
14082 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
14083 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
14084 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
14085 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
14086 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
14087 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
14088
14089 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14090 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14091 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14092 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14093 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14094 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14095 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14096 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14097 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14098 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14099 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14100 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14101 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14102 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14103 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14104 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
14105
14106 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
14107 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
14108 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
14109 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
14110 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14111 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14112 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14113 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14114
14115 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14116 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14117 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14118 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14119 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
14120 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
14121 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
14122 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
14123
14124 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
14125 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
14126 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
14127 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
14128 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14129 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14130 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14131 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14132 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14133 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14134 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14135 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14136 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14137 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14138 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14139 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14140
14141 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14142 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14143 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14144 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14145 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14146 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14147 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14148 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14149 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14150 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14151 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14152 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14153 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14154 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14155 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14156 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14157
14158 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14159 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14160 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14161 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14162 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14163 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14164 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14165 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14166 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14167 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14168 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14169 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14170 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14171 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14172 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14173 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14174
14175 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14176 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14177 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14178 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14179 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14180 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14181 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14182 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14183 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14184 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14185 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14186 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14187 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14188 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14189 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14190 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14191
14192 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14193 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14194 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14195 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14196 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14197 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14198 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14199 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14200 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14201 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14202 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14203 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14204 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14205 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14206 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14207 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14208
14209 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14210 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14211 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14212 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14213 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14214 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14215 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14216 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14217 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14218 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14219 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14220 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14221 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14222 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14223 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14224 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14225
14226 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14227 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14228 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14229 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14230 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14231 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14232 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14233 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14234 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14235 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14236 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14237 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14238 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14239 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14240 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14241 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14242
14243 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14244 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14245 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14246 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14247 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14248 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14249 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14250 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14251 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14252 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14253 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14254 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14255 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14256 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14257 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14258 /* 0xff */ IEMOP_X4(iemOp_ud0),
14259};
/* Sanity: 256 two-byte opcodes x 4 prefix columns (none, 0x66, 0xF3, 0xF2) = 1024 entries. */
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
14261
14262/** @} */
14263
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette