VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@96701

Last change on this file was 96701, checked in by vboxsync, 2 years ago:

VMM/IEM: Implement [v]pinsrw instruction, bugref:9898
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96701 2022-09-12 14:45:37Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions on the form:
 *      pxxx mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
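
/*
 * Usage sketch (illustrative only, assuming the paddb handler and the
 * iemAImpl_paddb_u64 helper from the A-implementation files): a two-byte
 * opcode handler typically just pairs this worker with the matching
 * assembly-level implementation, roughly like so:
 *
 *     FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC(paddb, "paddb Pq,Qq");
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
 *     }
 */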


/**
 * Common worker for MMX instructions on the form:
 *      pxxx mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx mm1, mm2/mem64
 * that was introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
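
/*
 * Usage sketch (illustrative only, assuming the pxor handler and the
 * iemAImpl_pxor_u128 helper): the 0x66-prefixed packed-integer forms
 * dispatch through this worker the same way, e.g. pxor xmm, xmm/m128
 * (0x66 0x0f 0xef) would look roughly like:
 *
 *     FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
 *     {
 *         IEMOP_MNEMONIC(pxor, "pxor Vx,Wx");
 *         return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
 *     }
 */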


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
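
/*
 * Usage sketch (illustrative only, assuming the punpcklbw handler and the
 * iemAImpl_punpcklbw_u64 helper): the unpack-low instructions route here
 * because they only consume the low half of the source operand:
 *
 *     FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
 *     {
 *         IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq,Qd");
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
 *     }
 */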


/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
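
/*
 * Usage sketch (illustrative only, assuming the addss handler and the
 * iemAImpl_addss_u128_r32 helper): scalar single-precision instructions
 * (0xf3 prefix) use this R32 variant since only 32 bits are read from
 * memory, e.g. addss xmm, xmm/m32 (0xf3 0x0f 0x58):
 *
 *     FNIEMOP_DEF(iemOp_addss_Vss_Wss)
 *     {
 *         IEMOP_MNEMONIC(addss, "addss Vss,Wss");
 *         return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
 *     }
 */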


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxs xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for VERR (0x0f 0x00 /4) and VERW (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
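
/*
 * Decoding note (illustrative): the table above is indexed by bits 5:3 (the
 * reg field) of the ModR/M byte. For example, bRm = 0xd1 (1101 0001b) splits
 * into mod = 11b (register operand), reg = 010b (entry 2, lldt) and
 * rm = 001b (the CX register).
 */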


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1797
/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0x02. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}


/** Opcode 0x0f 0x03. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}


/** Opcode 0x0f 0x05. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}


/** Opcode 0x0f 0x06. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}


/** Opcode 0x0f 0x07. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}


/** Opcode 0x0f 0x08. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x10
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf3
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0xf2
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

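/** Opcode 0x0f 0x12 - movlps Vq, Mq (memory form) / movhlps Vq, UqHi (register form). */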
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movlps_Vq_Mq__movhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
 *         op1=0x22222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opmnemonic udf30f13
 * @opcode 0x13
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f13
 * @opcode 0x13
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
}


/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
}


/**
 * @opdone
 * @opmnemonic udf30f14
 * @opcode 0x14
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f14
 * @opcode 0x14
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
}


/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
}


/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */

/**
 * @opdone
 * @opmnemonic udf30f15
 * @opcode 0x15
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f15
 * @opcode 0x15
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

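/** Opcode 0x0f 0x16 - movhps VqHi, Mq (memory form) / movlhps VqHi, Uq (register form). */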
FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid sse
         * @opgroup og_sse_simdfp_datamove
         * @opxcpttype 5
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x16
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f16m3
     * @opcode 0x16
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x16
 * @oppfx 0xf3
 * @opcpuid sse3
 * @opgroup og_sse3_pcksclr_datamove
 * @opxcpttype 4
 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
 *         op1=0x00000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opdone
 * @opmnemonic udf20f16
 * @opcode 0x16
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud0f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opdone
 * @opmnemonic udf30f17
 * @opcode 0x17
 * @oppfx 0xf3
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */

/**
 * @opmnemonic udf20f17
 * @opcode 0x17
 * @oppfx 0xf2
 * @opunused intel-modrm
 * @opcpuid sse
 * @optest ->
 * @opdone
 */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
}


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}


/**
 * @opcode 0x28
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opcode 0x28
 * @oppfx 66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/**
 * @opcode 0x29
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse_simdfp_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opcode 0x29
 * @oppfx 66
 * @opcpuid sse2
 * @opgroup og_sse2_pcksclr_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x29 - invalid */
/* Opcode 0xf2 0x0f 0x29 - invalid */


/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT


/**
 * @opcode 0x2b
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid sse
 * @opgroup og_sse1_cachect
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

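        /* Note: the non-temporal hint is not modelled here (nor in movntpd
           below); the store is performed as an ordinary aligned store. */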
        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}

/**
 * @opcode 0x2b
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid sse2
 * @opgroup og_sse2_cachect
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0xf3 0x0f 0x2b - invalid */
/* Opcode 0xf2 0x0f 0x2b - invalid */


/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - comiss Vss, Wss */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */

/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}

/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}

/** Opcode 0x0f 0x33. */
3734FNIEMOP_DEF(iemOp_rdpmc)
3735{
3736 IEMOP_MNEMONIC(rdpmc, "rdpmc");
3737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3738 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
3739}
3740
3741
3742/** Opcode 0x0f 0x34. */
3743FNIEMOP_DEF(iemOp_sysenter)
3744{
3745 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
3746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3747 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
3748}
3749
3750/** Opcode 0x0f 0x35. */
3751FNIEMOP_DEF(iemOp_sysexit)
3752{
3753 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
3754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3755 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
3756}
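
/*
 * Note: None of the system instructions above take decodable operands, so
 * each body is just the lock-prefix check followed by a deferral to the
 * full C implementation. A sketch of what the deferral roughly amounts to
 * (assumed expansion; the real macro lives in the IEM internals):
 *
 *      // illustration only
 *      return iemCImpl_rdtsc(pVCpu, IEM_GET_INSTR_LEN(pVCpu));
 */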
3757
3758/** Opcode 0x0f 0x37. */
3759FNIEMOP_STUB(iemOp_getsec);
3760
3761
3762/** Opcode 0x0f 0x38. */
3763FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
3764{
3765#ifdef IEM_WITH_THREE_0F_38
3766 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3767 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
3768#else
3769 IEMOP_BITCH_ABOUT_STUB();
3770 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3771#endif
3772}
3773
3774
3775/** Opcode 0x0f 0x3a. */
3776FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
3777{
3778#ifdef IEM_WITH_THREE_0F_3A
3779 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3780 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
3781#else
3782 IEMOP_BITCH_ABOUT_STUB();
3783 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3784#endif
3785}
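
/*
 * Note: Both escape tables are laid out with four entries per opcode byte,
 * one per mandatory-prefix column, which is what the 'b * 4 + idxPrefix'
 * indexing expresses. The assumed column order, matching the group 12-14
 * tables later in this file:
 *
 *      // illustration only; idxPrefix is maintained by the prefix decoder:
 *      //   0 = no mandatory prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2
 *      uintptr_t const idxEntry = (uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix;
 */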
3786
3787
3788/**
3789 * Implements a conditional move.
3790 *
3791 * Wish there was an obvious way to do this where we could share and reduce
3792 * code bloat.
3793 *
3794 * @param a_Cnd The conditional "microcode" operation.
3795 */
3796#define CMOV_X(a_Cnd) \
3797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
3798 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3799 { \
3800 switch (pVCpu->iem.s.enmEffOpSize) \
3801 { \
3802 case IEMMODE_16BIT: \
3803 IEM_MC_BEGIN(0, 1); \
3804 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3805 a_Cnd { \
3806 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3807 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3808 } IEM_MC_ENDIF(); \
3809 IEM_MC_ADVANCE_RIP(); \
3810 IEM_MC_END(); \
3811 return VINF_SUCCESS; \
3812 \
3813 case IEMMODE_32BIT: \
3814 IEM_MC_BEGIN(0, 1); \
3815 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3816 a_Cnd { \
3817 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3818 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3819 } IEM_MC_ELSE() { \
3820 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3821 } IEM_MC_ENDIF(); \
3822 IEM_MC_ADVANCE_RIP(); \
3823 IEM_MC_END(); \
3824 return VINF_SUCCESS; \
3825 \
3826 case IEMMODE_64BIT: \
3827 IEM_MC_BEGIN(0, 1); \
3828 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3829 a_Cnd { \
3830 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3831 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3832 } IEM_MC_ENDIF(); \
3833 IEM_MC_ADVANCE_RIP(); \
3834 IEM_MC_END(); \
3835 return VINF_SUCCESS; \
3836 \
3837 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3838 } \
3839 } \
3840 else \
3841 { \
3842 switch (pVCpu->iem.s.enmEffOpSize) \
3843 { \
3844 case IEMMODE_16BIT: \
3845 IEM_MC_BEGIN(0, 2); \
3846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3847 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3849 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3850 a_Cnd { \
3851 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3852 } IEM_MC_ENDIF(); \
3853 IEM_MC_ADVANCE_RIP(); \
3854 IEM_MC_END(); \
3855 return VINF_SUCCESS; \
3856 \
3857 case IEMMODE_32BIT: \
3858 IEM_MC_BEGIN(0, 2); \
3859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3860 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3861 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3862 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3863 a_Cnd { \
3864 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3865 } IEM_MC_ELSE() { \
3866 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3867 } IEM_MC_ENDIF(); \
3868 IEM_MC_ADVANCE_RIP(); \
3869 IEM_MC_END(); \
3870 return VINF_SUCCESS; \
3871 \
3872 case IEMMODE_64BIT: \
3873 IEM_MC_BEGIN(0, 2); \
3874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3875 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3877 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3878 a_Cnd { \
3879 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3880 } IEM_MC_ENDIF(); \
3881 IEM_MC_ADVANCE_RIP(); \
3882 IEM_MC_END(); \
3883 return VINF_SUCCESS; \
3884 \
3885 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3886 } \
3887 } do {} while (0)
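
/*
 * Note: As with all 32-bit GPR writes in 64-bit mode, the 32-bit cases
 * above clear the high half of the destination even when the condition is
 * false (hence the IEM_MC_ELSE branches); only the 16-bit form leaves the
 * rest of the register untouched. Rough sketch of the guest-visible
 * semantics (made-up names, illustration only):
 *
 *      uint64_t uNew = fCondition ? uSrc : uDstOld;
 *      if (enmOpSize == IEMMODE_32BIT)
 *          uNew = (uint32_t)uNew;  // high 32 bits zeroed either way
 */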
3888
3889
3890
3891/** Opcode 0x0f 0x40. */
3892FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
3893{
3894 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
3895 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
3896}
3897
3898
3899/** Opcode 0x0f 0x41. */
3900FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
3901{
3902 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
3903 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
3904}
3905
3906
3907/** Opcode 0x0f 0x42. */
3908FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
3909{
3910 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
3911 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
3912}
3913
3914
3915/** Opcode 0x0f 0x43. */
3916FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
3917{
3918 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
3919 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
3920}
3921
3922
3923/** Opcode 0x0f 0x44. */
3924FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
3925{
3926 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
3927 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
3928}
3929
3930
3931/** Opcode 0x0f 0x45. */
3932FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
3933{
3934 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
3935 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
3936}
3937
3938
3939/** Opcode 0x0f 0x46. */
3940FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
3941{
3942 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
3943 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
3944}
3945
3946
3947/** Opcode 0x0f 0x47. */
3948FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
3949{
3950 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
3951 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
3952}
3953
3954
3955/** Opcode 0x0f 0x48. */
3956FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
3957{
3958 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
3959 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
3960}
3961
3962
3963/** Opcode 0x0f 0x49. */
3964FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
3965{
3966 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
3967 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
3968}
3969
3970
3971/** Opcode 0x0f 0x4a. */
3972FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
3973{
3974 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
3975 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
3976}
3977
3978
3979/** Opcode 0x0f 0x4b. */
3980FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
3981{
3982 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
3983 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
3984}
3985
3986
3987/** Opcode 0x0f 0x4c. */
3988FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
3989{
3990 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
3991 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
3992}
3993
3994
3995/** Opcode 0x0f 0x4d. */
3996FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
3997{
3998 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
3999 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
4000}
4001
4002
4003/** Opcode 0x0f 0x4e. */
4004FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
4005{
4006 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
4007 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
4008}
4009
4010
4011/** Opcode 0x0f 0x4f. */
4012FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
4013{
4014 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
4015 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
4016}
4017
4018#undef CMOV_X
4019
4020/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
4021FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
4022/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
4023FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
4024/* Opcode 0xf3 0x0f 0x50 - invalid */
4025/* Opcode 0xf2 0x0f 0x50 - invalid */
4026
4027
4028/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
4029FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
4030{
4031 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4032 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
4033}
4034
4035
4036/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
4037FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
4038{
4039 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4040 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
4041}
4042
4043
4044/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
4045FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
4046{
4047 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
4048 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
4049}
4050
4051
4052/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
4053FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
4054{
4055 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
4056 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
4057}
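
/*
 * Note: The scalar variants (the FullR32/FullR64 workers) only operate on
 * the low element; the remaining bytes of the destination XMM register are
 * left unchanged, as usual for non-VEX scalar SSE arithmetic. Roughly, with
 * a made-up plain-float view of the registers (illustration only):
 *
 *      aDst[0] = sqrtf(aSrc[0]);   // sqrtss: aDst[1..3] keep their values
 */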
4058
4059
4060/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
4061FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
4062/* Opcode 0x66 0x0f 0x52 - invalid */
4063/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
4064FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
4065/* Opcode 0xf2 0x0f 0x52 - invalid */
4066
4067/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
4068FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
4069/* Opcode 0x66 0x0f 0x53 - invalid */
4070/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
4071FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
4072/* Opcode 0xf2 0x0f 0x53 - invalid */
4073
4074
4075/** Opcode 0x0f 0x54 - andps Vps, Wps */
4076FNIEMOP_DEF(iemOp_andps_Vps_Wps)
4077{
4078 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4079 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
4080}
4081
4082
4083/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
4084FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
4085{
4086 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4087 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
4088}
4089
4090
4091/* Opcode 0xf3 0x0f 0x54 - invalid */
4092/* Opcode 0xf2 0x0f 0x54 - invalid */
4093
4094
4095/** Opcode 0x0f 0x55 - andnps Vps, Wps */
4096FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
4097{
4098 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4099 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
4100}
4101
4102
4103/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
4104FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
4105{
4106 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4107 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
4108}
4109
4110
4111/* Opcode 0xf3 0x0f 0x55 - invalid */
4112/* Opcode 0xf2 0x0f 0x55 - invalid */
4113
4114
4115/** Opcode 0x0f 0x56 - orps Vps, Wps */
4116FNIEMOP_DEF(iemOp_orps_Vps_Wps)
4117{
4118 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4119 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
4120}
4121
4122
4123/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
4124FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
4125{
4126 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4127 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
4128}
4129
4130
4131/* Opcode 0xf3 0x0f 0x56 - invalid */
4132/* Opcode 0xf2 0x0f 0x56 - invalid */
4133
4134
4135/** Opcode 0x0f 0x57 - xorps Vps, Wps */
4136FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
4137{
4138 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4139 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
4140}
4141
4142
4143/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
4144FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
4145{
4146 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4147 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
4148}
4149
4150
4151/* Opcode 0xf3 0x0f 0x57 - invalid */
4152/* Opcode 0xf2 0x0f 0x57 - invalid */
4153
4154/** Opcode 0x0f 0x58 - addps Vps, Wps */
4155FNIEMOP_DEF(iemOp_addps_Vps_Wps)
4156{
4157 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4158 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
4159}
4160
4161
4162/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
4163FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
4164{
4165 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4166 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
4167}
4168
4169
4170/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
4171FNIEMOP_DEF(iemOp_addss_Vss_Wss)
4172{
4173 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
4174 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
4175}
4176
4177
4178/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
4179FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
4180{
4181 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
4182 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
4183}
4184
4185
4186/** Opcode 0x0f 0x59 - mulps Vps, Wps */
4187FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
4188{
4189 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4190 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
4191}
4192
4193
4194/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
4195FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
4196{
4197 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4198 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
4199}
4200
4201
4202/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
4203FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
4204{
4205 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
4206 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
4207}
4208
4209
4210/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
4211FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
4212{
4213 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
4214 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
4215}
4216
4217
4218/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
4219FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
4220{
4221 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
4222 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
4223}
4224
4225
4226/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
4227FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
4228{
4229 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
4230 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
4231}
4232
4233
4234/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
4235FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
4236{
4237 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
4238 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
4239}
4240
4241
4242/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
4243FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
4244{
4245 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
4246 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
4247}
4248
4249
4250/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
4251FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
4252/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
4253FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
4254/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
4255FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
4256/* Opcode 0xf2 0x0f 0x5b - invalid */
4257
4258
4259/** Opcode 0x0f 0x5c - subps Vps, Wps */
4260FNIEMOP_DEF(iemOp_subps_Vps_Wps)
4261{
4262 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4263 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
4264}
4265
4266
4267/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
4268FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
4269{
4270 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4271 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
4272}
4273
4274
4275/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
4276FNIEMOP_DEF(iemOp_subss_Vss_Wss)
4277{
4278 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
4279 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
4280}
4281
4282
4283/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
4284FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
4285{
4286 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
4287 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
4288}
4289
4290
4291/** Opcode 0x0f 0x5d - minps Vps, Wps */
4292FNIEMOP_DEF(iemOp_minps_Vps_Wps)
4293{
4294 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4295 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
4296}
4297
4298
4299/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
4300FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
4301{
4302 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4303 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
4304}
4305
4306
4307/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
4308FNIEMOP_DEF(iemOp_minss_Vss_Wss)
4309{
4310 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
4311 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
4312}
4313
4314
4315/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
4316FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
4317{
4318 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
4319 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
4320}
4321
4322
4323/** Opcode 0x0f 0x5e - divps Vps, Wps */
4324FNIEMOP_DEF(iemOp_divps_Vps_Wps)
4325{
4326 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4327 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
4328}
4329
4330
4331/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
4332FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
4333{
4334 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4335 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
4336}
4337
4338
4339/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
4340FNIEMOP_DEF(iemOp_divss_Vss_Wss)
4341{
4342 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
4343 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
4344}
4345
4346
4347/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
4348FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
4349{
4350 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
4351 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
4352}
4353
4354
4355/** Opcode 0x0f 0x5f - maxps Vps, Wps */
4356FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
4357{
4358 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4359 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
4360}
4361
4362
4363/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
4364FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
4365{
4366 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4367 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
4368}
4369
4370
4371/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
4372FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
4373{
4374 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
4375 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
4376}
4377
4378
4379/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
4380FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
4381{
4382 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
4383 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
4384}
4385
4386
4387/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
4388FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
4389{
4390 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4391 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
4392}
4393
4394
4395/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
4396FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
4397{
4398 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4399 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
4400}
4401
4402
4403/* Opcode 0xf3 0x0f 0x60 - invalid */
4404
4405
4406/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
4407FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
4408{
4409 /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID is required. */
4410 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4411 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
4412}
4413
4414
4415/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
4416FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
4417{
4418 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4419 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
4420}
4421
4422
4423/* Opcode 0xf3 0x0f 0x61 - invalid */
4424
4425
4426/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
4427FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
4428{
4429 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4430 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
4431}
4432
4433
4434/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
4435FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
4436{
4437 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4438 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
4439}
4440
4441
4442/* Opcode 0xf3 0x0f 0x62 - invalid */
4443
4444
4445
4446/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
4447FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
4448{
4449 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4450 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
4451}
4452
4453
4454/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
4455FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
4456{
4457 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4458 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
4459}
4460
4461
4462/* Opcode 0xf3 0x0f 0x63 - invalid */
4463
4464
4465/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
4466FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
4467{
4468 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4469 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
4470}
4471
4472
4473/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
4474FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
4475{
4476 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4477 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
4478}
4479
4480
4481/* Opcode 0xf3 0x0f 0x64 - invalid */
4482
4483
4484/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
4485FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
4486{
4487 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4488 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
4489}
4490
4491
4492/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
4493FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
4494{
4495 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4496 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
4497}
4498
4499
4500/* Opcode 0xf3 0x0f 0x65 - invalid */
4501
4502
4503/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
4504FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
4505{
4506 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4507 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
4508}
4509
4510
4511/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
4512FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
4513{
4514 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4515 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
4516}
4517
4518
4519/* Opcode 0xf3 0x0f 0x66 - invalid */
4520
4521
4522/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
4523FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
4524{
4525 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4526 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
4527}
4528
4529
4530/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
4531FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
4532{
4533 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4534 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
4535}
4536
4537
4538/* Opcode 0xf3 0x0f 0x67 - invalid */
4539
4540
4541/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
4542 * @note Intel and AMD both use Qd for the second parameter; however, they
4543 * both list it as an mmX/mem64 operand and Intel describes it as being
4544 * loaded as a qword, so it should be Qq, shouldn't it? */
4545FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
4546{
4547 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4548 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
4549}
4550
4551
4552/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
4553FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
4554{
4555 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4556 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
4557}
4558
4559
4560/* Opcode 0xf3 0x0f 0x68 - invalid */
4561
4562
4563/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
4564 * @note Intel and AMD both use Qd for the second parameter; however, they
4565 * both list it as an mmX/mem64 operand and Intel describes it as being
4566 * loaded as a qword, so it should be Qq, shouldn't it? */
4567FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
4568{
4569 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4570 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
4571}
4572
4573
4574/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
4575FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
4576{
4577 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4578 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
4579
4580}
4581
4582
4583/* Opcode 0xf3 0x0f 0x69 - invalid */
4584
4585
4586/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
4587 * @note Intel and AMD both use Qd for the second parameter; however, they
4588 * both list it as an mmX/mem64 operand and Intel describes it as being
4589 * loaded as a qword, so it should be Qq, shouldn't it? */
4590FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
4591{
4592 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4593 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
4594}
4595
4596
4597/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
4598FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
4599{
4600 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4601 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
4602}
4603
4604
4605/* Opcode 0xf3 0x0f 0x6a - invalid */
4606
4607
4608/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
4609FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
4610{
4611 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4612 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
4613}
4614
4615
4616/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
4617FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
4618{
4619 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4620 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
4621}
4622
4623
4624/* Opcode 0xf3 0x0f 0x6b - invalid */
4625
4626
4627/* Opcode 0x0f 0x6c - invalid */
4628
4629
4630/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
4631FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
4632{
4633 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4634 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
4635}
4636
4637
4638/* Opcode 0xf3 0x0f 0x6c - invalid */
4639/* Opcode 0xf2 0x0f 0x6c - invalid */
4640
4641
4642/* Opcode 0x0f 0x6d - invalid */
4643
4644
4645/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
4646FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
4647{
4648 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4649 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
4650}
4651
4652
4653/* Opcode 0xf3 0x0f 0x6d - invalid */
4654
4655
4656FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
4657{
4658 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4659 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4660 {
4661 /**
4662 * @opcode 0x6e
4663 * @opcodesub rex.w=1
4664 * @oppfx none
4665 * @opcpuid mmx
4666 * @opgroup og_mmx_datamove
4667 * @opxcpttype 5
4668 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4669 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4670 */
4671 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
4672 if (IEM_IS_MODRM_REG_MODE(bRm))
4673 {
4674 /* MMX, greg64 */
4675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4676 IEM_MC_BEGIN(0, 1);
4677 IEM_MC_LOCAL(uint64_t, u64Tmp);
4678
4679 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4680 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4681
4682 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4683 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4684 IEM_MC_FPU_TO_MMX_MODE();
4685
4686 IEM_MC_ADVANCE_RIP();
4687 IEM_MC_END();
4688 }
4689 else
4690 {
4691 /* MMX, [mem64] */
4692 IEM_MC_BEGIN(0, 2);
4693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4694 IEM_MC_LOCAL(uint64_t, u64Tmp);
4695
4696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4698 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4699 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4700
4701 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4702 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4703 IEM_MC_FPU_TO_MMX_MODE();
4704
4705 IEM_MC_ADVANCE_RIP();
4706 IEM_MC_END();
4707 }
4708 }
4709 else
4710 {
4711 /**
4712 * @opdone
4713 * @opcode 0x6e
4714 * @opcodesub rex.w=0
4715 * @oppfx none
4716 * @opcpuid mmx
4717 * @opgroup og_mmx_datamove
4718 * @opxcpttype 5
4719 * @opfunction iemOp_movd_q_Pd_Ey
4720 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4721 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4722 */
4723 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
4724 if (IEM_IS_MODRM_REG_MODE(bRm))
4725 {
4726 /* MMX, greg */
4727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4728 IEM_MC_BEGIN(0, 1);
4729 IEM_MC_LOCAL(uint64_t, u64Tmp);
4730
4731 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4732 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4733
4734 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4735 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4736 IEM_MC_FPU_TO_MMX_MODE();
4737
4738 IEM_MC_ADVANCE_RIP();
4739 IEM_MC_END();
4740 }
4741 else
4742 {
4743 /* MMX, [mem] */
4744 IEM_MC_BEGIN(0, 2);
4745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4746 IEM_MC_LOCAL(uint32_t, u32Tmp);
4747
4748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4750 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4751 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4752
4753 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4754 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
4755 IEM_MC_FPU_TO_MMX_MODE();
4756
4757 IEM_MC_ADVANCE_RIP();
4758 IEM_MC_END();
4759 }
4760 }
4761 return VINF_SUCCESS;
4762}
4763
4764FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
4765{
4766 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4767 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4768 {
4769 /**
4770 * @opcode 0x6e
4771 * @opcodesub rex.w=1
4772 * @oppfx 0x66
4773 * @opcpuid sse2
4774 * @opgroup og_sse2_simdint_datamove
4775 * @opxcpttype 5
4776 * @optest 64-bit / op1=1 op2=2 -> op1=2
4777 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4778 */
4779 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
4780 if (IEM_IS_MODRM_REG_MODE(bRm))
4781 {
4782 /* XMM, greg64 */
4783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4784 IEM_MC_BEGIN(0, 1);
4785 IEM_MC_LOCAL(uint64_t, u64Tmp);
4786
4787 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4788 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4789
4790 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4791 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4792
4793 IEM_MC_ADVANCE_RIP();
4794 IEM_MC_END();
4795 }
4796 else
4797 {
4798 /* XMM, [mem64] */
4799 IEM_MC_BEGIN(0, 2);
4800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4801 IEM_MC_LOCAL(uint64_t, u64Tmp);
4802
4803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4805 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4806 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4807
4808 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4809 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4810
4811 IEM_MC_ADVANCE_RIP();
4812 IEM_MC_END();
4813 }
4814 }
4815 else
4816 {
4817 /**
4818 * @opdone
4819 * @opcode 0x6e
4820 * @opcodesub rex.w=0
4821 * @oppfx 0x66
4822 * @opcpuid sse2
4823 * @opgroup og_sse2_simdint_datamove
4824 * @opxcpttype 5
4825 * @opfunction iemOp_movd_q_Vy_Ey
4826 * @optest op1=1 op2=2 -> op1=2
4827 * @optest op1=0 op2=-42 -> op1=-42
4828 */
4829 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
4830 if (IEM_IS_MODRM_REG_MODE(bRm))
4831 {
4832 /* XMM, greg32 */
4833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4834 IEM_MC_BEGIN(0, 1);
4835 IEM_MC_LOCAL(uint32_t, u32Tmp);
4836
4837 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4838 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4839
4840 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4841 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4842
4843 IEM_MC_ADVANCE_RIP();
4844 IEM_MC_END();
4845 }
4846 else
4847 {
4848 /* XMM, [mem32] */
4849 IEM_MC_BEGIN(0, 2);
4850 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4851 IEM_MC_LOCAL(uint32_t, u32Tmp);
4852
4853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4855 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4856 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4857
4858 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4859 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4860
4861 IEM_MC_ADVANCE_RIP();
4862 IEM_MC_END();
4863 }
4864 }
4865 return VINF_SUCCESS;
4866}
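
/*
 * Note: In both rex.w variants above the destination XMM register is
 * written in full: the 32-bit form zero-extends through bit 127 (the Zx in
 * VdZx_WO), which IEM_MC_STORE_XREG_U32_ZX_U128 expresses. Roughly
 * (illustration only):
 *
 *      uXmm.au32[0] = u32Src;
 *      uXmm.au32[1] = 0;
 *      uXmm.au64[1] = 0;           // the whole upper quadword is cleared too
 */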
4867
4868/* Opcode 0xf3 0x0f 0x6e - invalid */
4869
4870
4871/**
4872 * @opcode 0x6f
4873 * @oppfx none
4874 * @opcpuid mmx
4875 * @opgroup og_mmx_datamove
4876 * @opxcpttype 5
4877 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4878 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4879 */
4880FNIEMOP_DEF(iemOp_movq_Pq_Qq)
4881{
4882 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4884 if (IEM_IS_MODRM_REG_MODE(bRm))
4885 {
4886 /*
4887 * Register, register.
4888 */
4889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4890 IEM_MC_BEGIN(0, 1);
4891 IEM_MC_LOCAL(uint64_t, u64Tmp);
4892
4893 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4894 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4895
4896 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
4897 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4898 IEM_MC_FPU_TO_MMX_MODE();
4899
4900 IEM_MC_ADVANCE_RIP();
4901 IEM_MC_END();
4902 }
4903 else
4904 {
4905 /*
4906 * Register, memory.
4907 */
4908 IEM_MC_BEGIN(0, 2);
4909 IEM_MC_LOCAL(uint64_t, u64Tmp);
4910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4911
4912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4914 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4915 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4916
4917 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4918 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4919 IEM_MC_FPU_TO_MMX_MODE();
4920
4921 IEM_MC_ADVANCE_RIP();
4922 IEM_MC_END();
4923 }
4924 return VINF_SUCCESS;
4925}
4926
4927/**
4928 * @opcode 0x6f
4929 * @oppfx 0x66
4930 * @opcpuid sse2
4931 * @opgroup og_sse2_simdint_datamove
4932 * @opxcpttype 1
4933 * @optest op1=1 op2=2 -> op1=2
4934 * @optest op1=0 op2=-42 -> op1=-42
4935 */
4936FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
4937{
4938 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4940 if (IEM_IS_MODRM_REG_MODE(bRm))
4941 {
4942 /*
4943 * Register, register.
4944 */
4945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4946 IEM_MC_BEGIN(0, 0);
4947
4948 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4949 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4950
4951 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4952 IEM_GET_MODRM_RM(pVCpu, bRm));
4953 IEM_MC_ADVANCE_RIP();
4954 IEM_MC_END();
4955 }
4956 else
4957 {
4958 /*
4959 * Register, memory.
4960 */
4961 IEM_MC_BEGIN(0, 2);
4962 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4964
4965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4967 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4968 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4969
4970 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4971 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4972
4973 IEM_MC_ADVANCE_RIP();
4974 IEM_MC_END();
4975 }
4976 return VINF_SUCCESS;
4977}
4978
4979/**
4980 * @opcode 0x6f
4981 * @oppfx 0xf3
4982 * @opcpuid sse2
4983 * @opgroup og_sse2_simdint_datamove
4984 * @opxcpttype 4UA
4985 * @optest op1=1 op2=2 -> op1=2
4986 * @optest op1=0 op2=-42 -> op1=-42
4987 */
4988FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
4989{
4990 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4992 if (IEM_IS_MODRM_REG_MODE(bRm))
4993 {
4994 /*
4995 * Register, register.
4996 */
4997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4998 IEM_MC_BEGIN(0, 0);
4999 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5000 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5001 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
5002 IEM_GET_MODRM_RM(pVCpu, bRm));
5003 IEM_MC_ADVANCE_RIP();
5004 IEM_MC_END();
5005 }
5006 else
5007 {
5008 /*
5009 * Register, memory.
5010 */
5011 IEM_MC_BEGIN(0, 2);
5012 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5014
5015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5017 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5018 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5019 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5020 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
5021
5022 IEM_MC_ADVANCE_RIP();
5023 IEM_MC_END();
5024 }
5025 return VINF_SUCCESS;
5026}
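
/*
 * Note: The only functional difference between the movdqa and movdqu
 * emulations above is the memory fetch: the aligned variant uses
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE, which raises #GP(0) when the effective
 * address is not 16-byte aligned, whereas movdqu uses the plain
 * IEM_MC_FETCH_MEM_U128. The register-to-register encodings are identical.
 * Roughly (illustration only, made-up flag name):
 *
 *      if (fAlignedVariant && (GCPtrEffSrc & 15))
 *          return iemRaiseGeneralProtectionFault0(pVCpu);
 */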
5027
5028
5029/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
5030FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
5031{
5032 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5033 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5034 if (IEM_IS_MODRM_REG_MODE(bRm))
5035 {
5036 /*
5037 * Register, register.
5038 */
5039 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
5040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5041
5042 IEM_MC_BEGIN(3, 0);
5043 IEM_MC_ARG(uint64_t *, pDst, 0);
5044 IEM_MC_ARG(uint64_t const *, pSrc, 1);
5045 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
5046 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
5047 IEM_MC_PREPARE_FPU_USAGE();
5048 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
5049 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
5050 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
5051 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
5052 IEM_MC_FPU_TO_MMX_MODE();
5053 IEM_MC_ADVANCE_RIP();
5054 IEM_MC_END();
5055 }
5056 else
5057 {
5058 /*
5059 * Register, memory.
5060 */
5061 IEM_MC_BEGIN(3, 2);
5062 IEM_MC_ARG(uint64_t *, pDst, 0);
5063 IEM_MC_LOCAL(uint64_t, uSrc);
5064 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
5065 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5066
5067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5068 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
5069 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
5070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5071 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
5072
5073 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5074 IEM_MC_PREPARE_FPU_USAGE();
5075 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
5076 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
5077 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
5078 IEM_MC_FPU_TO_MMX_MODE();
5079
5080 IEM_MC_ADVANCE_RIP();
5081 IEM_MC_END();
5082 }
5083 return VINF_SUCCESS;
5084}
5085
5086
5087/**
5088 * Common worker for SSE2 instructions on the forms:
5089 * pshufd xmm1, xmm2/mem128, imm8
5090 * pshufhw xmm1, xmm2/mem128, imm8
5091 * pshuflw xmm1, xmm2/mem128, imm8
5092 *
5093 * Proper alignment of the 128-bit operand is enforced.
5094 * Exceptions type 4. SSE2 cpuid checks.
5095 */
5096FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
5097{
5098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5099 if (IEM_IS_MODRM_REG_MODE(bRm))
5100 {
5101 /*
5102 * Register, register.
5103 */
5104 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
5105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5106
5107 IEM_MC_BEGIN(3, 0);
5108 IEM_MC_ARG(PRTUINT128U, puDst, 0);
5109 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5110 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
5111 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5112 IEM_MC_PREPARE_SSE_USAGE();
5113 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5114 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5115 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
5116 IEM_MC_ADVANCE_RIP();
5117 IEM_MC_END();
5118 }
5119 else
5120 {
5121 /*
5122 * Register, memory.
5123 */
5124 IEM_MC_BEGIN(3, 2);
5125 IEM_MC_ARG(PRTUINT128U, puDst, 0);
5126 IEM_MC_LOCAL(RTUINT128U, uSrc);
5127 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
5128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5129
5130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5131 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
5132 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
5133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5134 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5135
5136 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5137 IEM_MC_PREPARE_SSE_USAGE();
5138 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5139 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
5140
5141 IEM_MC_ADVANCE_RIP();
5142 IEM_MC_END();
5143 }
5144 return VINF_SUCCESS;
5145}
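
/*
 * Note: All three shuffle variants share the immediate decoding: each 2-bit
 * field of the immediate selects one source element. For pshufd the guest
 * visible result is roughly (illustration only, not the emulation code):
 *
 *      for (unsigned i = 0; i < 4; i++)
 *          uDst.au32[i] = uSrc.au32[(bEvil >> (i * 2)) & 3];
 *
 * pshuflw applies the same scheme to the four low words and copies the high
 * quadword unchanged; pshufhw does the opposite.
 */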
5146
5147
5148/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
5149FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
5150{
5151 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5152 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
5153}
5154
5155
5156/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
5157FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
5158{
5159 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5160 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
5161}
5162
5163
5164/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
5165FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
5166{
5167 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5168 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
5169}
5170
5171
5172/**
5173 * Common worker for MMX instructions of the form:
5174 * psrlw mm, imm8
5175 * psraw mm, imm8
5176 * psllw mm, imm8
5177 * psrld mm, imm8
5178 * psrad mm, imm8
5179 * pslld mm, imm8
5180 * psrlq mm, imm8
5181 * psllq mm, imm8
5182 *
5183 */
5184FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
5185{
5186 if (IEM_IS_MODRM_REG_MODE(bRm))
5187 {
5188 /*
5189 * Register, immediate.
5190 */
5191 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5193
5194 IEM_MC_BEGIN(2, 0);
5195 IEM_MC_ARG(uint64_t *, pDst, 0);
5196 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
5197 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5198 IEM_MC_PREPARE_FPU_USAGE();
5199 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
5200 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
5201 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
5202 IEM_MC_FPU_TO_MMX_MODE();
5203 IEM_MC_ADVANCE_RIP();
5204 IEM_MC_END();
5205 }
5206 else
5207 {
5208 /*
5209 * Register, memory: not reachable, as the group decoders only
5210 * dispatch the register form to this worker.
5211 */
5212 }
5213 return VINF_SUCCESS;
5214}
5215
5216
5217/**
5218 * Common worker for SSE2 instructions of the form:
5219 * psrlw xmm, imm8
5220 * psraw xmm, imm8
5221 * psllw xmm, imm8
5222 * psrld xmm, imm8
5223 * psrad xmm, imm8
5224 * pslld xmm, imm8
5225 * psrlq xmm, imm8
5226 * psllq xmm, imm8
5227 *
5228 */
5229FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
5230{
5231 if (IEM_IS_MODRM_REG_MODE(bRm))
5232 {
5233 /*
5234 * Register, immediate.
5235 */
5236 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5238
5239 IEM_MC_BEGIN(2, 0);
5240 IEM_MC_ARG(PRTUINT128U, pDst, 0);
5241 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
5242 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5243 IEM_MC_PREPARE_SSE_USAGE();
5244 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5245 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
5246 IEM_MC_ADVANCE_RIP();
5247 IEM_MC_END();
5248 }
5249 else
5250 {
5251 /*
5252 * Register, memory: not reachable, as the group decoders only
5253 * dispatch the register form to this worker.
5254 */
5255 }
5256 return VINF_SUCCESS;
5257}
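
/*
 * Note: Both shift-by-immediate workers above only implement the register
 * encoding; the group 12/13/14 decoders below route the memory encoding to
 * the invalid-opcode handlers, hence the empty else branches. As for the
 * shift semantics themselves: a count larger than the element width zeroes
 * the element for the logical shifts, while the arithmetic shifts behave as
 * if the count were clamped. For words (illustration only):
 *
 *      uint16_t uResLogical = cShift <= 15 ? uWord >> cShift : 0;    // psrlw
 *      int16_t  iResArith   = (int16_t)iWord >> RT_MIN(cShift, 15);  // psraw
 */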
5258
5259
5260/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
5261FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
5262{
5263// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5264 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
5265}
5266
5267
5268/** Opcode 0x66 0x0f 0x71 11/2. */
5269FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
5270{
5271// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5272 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
5273}
5274
5275
5276/** Opcode 0x0f 0x71 11/4. */
5277FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
5278{
5279// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5280 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
5281}
5282
5283
5284/** Opcode 0x66 0x0f 0x71 11/4. */
5285FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
5286{
5287// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5288 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
5289}
5290
5291
5292/** Opcode 0x0f 0x71 11/6. */
5293FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
5294{
5295// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5296 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
5297}
5298
5299
5300/** Opcode 0x66 0x0f 0x71 11/6. */
5301FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
5302{
5303// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5304 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
5305}
5306
5307
5308/**
5309 * Group 12 jump table for register variant.
5310 */
5311IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
5312{
5313 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5314 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5315 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5316 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5317 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5318 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5319 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5320 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
5321};
5322AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
5323
5324
5325/** Opcode 0x0f 0x71. */
5326FNIEMOP_DEF(iemOp_Grp12)
5327{
5328 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5329 if (IEM_IS_MODRM_REG_MODE(bRm))
5330 /* register, register */
5331 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5332 + pVCpu->iem.s.idxPrefix], bRm);
5333 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
5334}
5335
5336
5337/** Opcode 0x0f 0x72 11/2. */
5338FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
5339{
5340// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5341 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
5342}
5343
5344
5345/** Opcode 0x66 0x0f 0x72 11/2. */
5346FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
5347{
5348// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5349 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
5350}
5351
5352
5353/** Opcode 0x0f 0x72 11/4. */
5354FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
5355{
5356// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5357 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
5358}
5359
5360
5361/** Opcode 0x66 0x0f 0x72 11/4. */
5362FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
5363{
5364// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5365 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
5366}
5367
5368
5369/** Opcode 0x0f 0x72 11/6. */
5370FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
5371{
5372// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5373 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
5374}
5375
5376/** Opcode 0x66 0x0f 0x72 11/6. */
5377FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
5378{
5379// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5380 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
5381}
5382
5383
5384/**
5385 * Group 13 jump table for register variant.
5386 */
5387IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
5388{
5389 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5390 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5391 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5392 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5393 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5394 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5395 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5396 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
5397};
5398AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
5399
5400/** Opcode 0x0f 0x72. */
5401FNIEMOP_DEF(iemOp_Grp13)
5402{
5403 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5404 if (IEM_IS_MODRM_REG_MODE(bRm))
5405 /* register, register */
5406 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5407 + pVCpu->iem.s.idxPrefix], bRm);
5408 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
5409}
5410
5411
5412/** Opcode 0x0f 0x73 11/2. */
5413FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
5414{
5415// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5416 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
5417}
5418
5419
5420/** Opcode 0x66 0x0f 0x73 11/2. */
5421FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
5422{
5423// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5424 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
5425}
5426
5427
5428/** Opcode 0x66 0x0f 0x73 11/3. */
5429FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
5430{
5431// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5432 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
5433}
5434
5435
5436/** Opcode 0x0f 0x73 11/6. */
5437FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
5438{
5439// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5440 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
5441}
5442
5443
5444/** Opcode 0x66 0x0f 0x73 11/6. */
5445FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
5446{
5447// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5448 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
5449}
5450
5451
5452/** Opcode 0x66 0x0f 0x73 11/7. */
5453FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
5454{
5455// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5456 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
5457}
5458
5459/**
5460 * Group 14 jump table for register variant.
5461 */
5462IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
5463{
5464 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5465 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5466 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5467 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5468 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5469 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5470 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5471 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5472};
5473AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
5474
5475
5476/** Opcode 0x0f 0x73. */
5477FNIEMOP_DEF(iemOp_Grp14)
5478{
5479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5480 if (IEM_IS_MODRM_REG_MODE(bRm))
5481 /* register, register */
5482 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5483 + pVCpu->iem.s.idxPrefix], bRm);
5484 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
5485}
5486
5487
5488/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
5489FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
5490{
5491 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5492 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
5493}
5494
5495
5496/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
5497FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
5498{
5499 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5500 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
5501}
5502
5503
5504/* Opcode 0xf3 0x0f 0x74 - invalid */
5505/* Opcode 0xf2 0x0f 0x74 - invalid */
5506
5507
5508/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
5509FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
5510{
5511 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5512 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
5513}
5514
5515
5516/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
5517FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
5518{
5519 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5520 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
5521}
5522
5523
5524/* Opcode 0xf3 0x0f 0x75 - invalid */
5525/* Opcode 0xf2 0x0f 0x75 - invalid */
5526
5527
5528/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
5529FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
5530{
5531 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5532 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
5533}
5534
5535
5536/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
5537FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
5538{
5539 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5540 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
5541}
5542
5543
5544/* Opcode 0xf3 0x0f 0x76 - invalid */
5545/* Opcode 0xf2 0x0f 0x76 - invalid */
5546
5547
5548/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
5549FNIEMOP_DEF(iemOp_emms)
5550{
5551 IEMOP_MNEMONIC(emms, "emms");
5552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5553
5554 IEM_MC_BEGIN(0, 0);
5555 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
5556 IEM_MC_MAYBE_RAISE_FPU_XCPT();
5557 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
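    /* EMMS architecturally empties the x87 register stack by setting every
       FTW tag to 11b (empty); the IEM_MC_FPU_FROM_MMX_MODE below performs
       that tag-word reset. */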
5558 IEM_MC_FPU_FROM_MMX_MODE();
5559 IEM_MC_ADVANCE_RIP();
5560 IEM_MC_END();
5561 return VINF_SUCCESS;
5562}
5563
5564/* Opcode 0x66 0x0f 0x77 - invalid */
5565/* Opcode 0xf3 0x0f 0x77 - invalid */
5566/* Opcode 0xf2 0x0f 0x77 - invalid */
5567
5568/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
5569#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5570FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
5571{
5572 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
5573 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
5574 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
5575 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
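    /* VMREAD/VMWRITE use a fixed operand width: 64 bits in 64-bit mode and
       32 bits everywhere else; operand-size prefixes do not change it. */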
5576
5577 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5578 if (IEM_IS_MODRM_REG_MODE(bRm))
5579 {
5580 /*
5581 * Register, register.
5582 */
5583 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5584 if (enmEffOpSize == IEMMODE_64BIT)
5585 {
5586 IEM_MC_BEGIN(2, 0);
5587 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5588 IEM_MC_ARG(uint64_t, u64Enc, 1);
5589 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5590 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
5591 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
5592 IEM_MC_END();
5593 }
5594 else
5595 {
5596 IEM_MC_BEGIN(2, 0);
5597 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5598 IEM_MC_ARG(uint32_t, u32Enc, 1);
5599 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5600 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
5601 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
5602 IEM_MC_END();
5603 }
5604 }
5605 else
5606 {
5607 /*
5608 * Memory, register.
5609 */
5610 if (enmEffOpSize == IEMMODE_64BIT)
5611 {
5612 IEM_MC_BEGIN(3, 0);
5613 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5614 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5615 IEM_MC_ARG(uint64_t, u64Enc, 2);
5616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5617 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5618 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5619 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5620 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
5621 IEM_MC_END();
5622 }
5623 else
5624 {
5625 IEM_MC_BEGIN(3, 0);
5626 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5627 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5628 IEM_MC_ARG(uint32_t, u32Enc, 2);
5629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5630 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5631 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5632 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5633 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
5634 IEM_MC_END();
5635 }
5636 }
5637 return VINF_SUCCESS;
5638}
5639#else
5640FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
5641#endif
5642
5643/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
5644FNIEMOP_STUB(iemOp_AmdGrp17);
5645/* Opcode 0xf3 0x0f 0x78 - invalid */
5646/* Opcode 0xf2 0x0f 0x78 - invalid */
5647
5648/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
5649#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5650FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
5651{
5652 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
5653 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
5654 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
5655 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
5656
5657 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5658 if (IEM_IS_MODRM_REG_MODE(bRm))
5659 {
5660 /*
5661 * Register, register.
5662 */
5663 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5664 if (enmEffOpSize == IEMMODE_64BIT)
5665 {
5666 IEM_MC_BEGIN(2, 0);
5667 IEM_MC_ARG(uint64_t, u64Val, 0);
5668 IEM_MC_ARG(uint64_t, u64Enc, 1);
5669 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
5670 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5671 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
5672 IEM_MC_END();
5673 }
5674 else
5675 {
5676 IEM_MC_BEGIN(2, 0);
5677 IEM_MC_ARG(uint32_t, u32Val, 0);
5678 IEM_MC_ARG(uint32_t, u32Enc, 1);
5679 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
5680 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5681 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
5682 IEM_MC_END();
5683 }
5684 }
5685 else
5686 {
5687 /*
5688 * Register, memory.
5689 */
5690 if (enmEffOpSize == IEMMODE_64BIT)
5691 {
5692 IEM_MC_BEGIN(3, 0);
5693 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5694 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5695 IEM_MC_ARG(uint64_t, u64Enc, 2);
5696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5697 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5698 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5699 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5700 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
5701 IEM_MC_END();
5702 }
5703 else
5704 {
5705 IEM_MC_BEGIN(3, 0);
5706 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5707 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5708 IEM_MC_ARG(uint32_t, u32Enc, 2);
5709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5710 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5711 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5712 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5713 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
5714 IEM_MC_END();
5715 }
5716 }
5717 return VINF_SUCCESS;
5718}
5719#else
5720FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
5721#endif
5722/* Opcode 0x66 0x0f 0x79 - invalid */
5723/* Opcode 0xf3 0x0f 0x79 - invalid */
5724/* Opcode 0xf2 0x0f 0x79 - invalid */
5725
5726/* Opcode 0x0f 0x7a - invalid */
5727/* Opcode 0x66 0x0f 0x7a - invalid */
5728/* Opcode 0xf3 0x0f 0x7a - invalid */
5729/* Opcode 0xf2 0x0f 0x7a - invalid */
5730
5731/* Opcode 0x0f 0x7b - invalid */
5732/* Opcode 0x66 0x0f 0x7b - invalid */
5733/* Opcode 0xf3 0x0f 0x7b - invalid */
5734/* Opcode 0xf2 0x0f 0x7b - invalid */
5735
5736/* Opcode 0x0f 0x7c - invalid */
5737
5738
5739/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
5740FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
5741{
5742 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
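    /* Horizontal add: result[0] = sum of the two doubles in the destination,
       result[1] = sum of the two doubles in the source; the arithmetic
       itself lives in iemAImpl_haddpd_u128. */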
5743 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
5744}
5745
5746
5747/* Opcode 0xf3 0x0f 0x7c - invalid */
5748
5749
5750/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
5751FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
5752{
5753 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5754 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
5755}
5756
5757
5758/* Opcode 0x0f 0x7d - invalid */
5759
5760
5761/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
5762FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
5763{
5764 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5765 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
5766}
5767
5768
5769/* Opcode 0xf3 0x0f 0x7d - invalid */
5770
5771
5772/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
5773FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
5774{
5775 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5776 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
5777}
5778
5779
5780/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
5781FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
5782{
5783 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
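    /* REX.W picks between the two forms of 0F 7E: movq r/m64, mm (rex.w=1)
       and movd r/m32, mm (rex.w=0); see the @opcodesub blocks below. */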
5784 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5785 {
5786 /**
5787 * @opcode 0x7e
5788 * @opcodesub rex.w=1
5789 * @oppfx none
5790 * @opcpuid mmx
5791 * @opgroup og_mmx_datamove
5792 * @opxcpttype 5
5793 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
5794 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
5795 */
5796 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5797 if (IEM_IS_MODRM_REG_MODE(bRm))
5798 {
5799 /* greg64, MMX */
5800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5801 IEM_MC_BEGIN(0, 1);
5802 IEM_MC_LOCAL(uint64_t, u64Tmp);
5803
5804 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5805 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5806
5807 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5808 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5809 IEM_MC_FPU_TO_MMX_MODE();
5810
5811 IEM_MC_ADVANCE_RIP();
5812 IEM_MC_END();
5813 }
5814 else
5815 {
5816 /* [mem64], MMX */
5817 IEM_MC_BEGIN(0, 2);
5818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5819 IEM_MC_LOCAL(uint64_t, u64Tmp);
5820
5821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5823 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5824 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5825
5826 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5827 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5828 IEM_MC_FPU_TO_MMX_MODE();
5829
5830 IEM_MC_ADVANCE_RIP();
5831 IEM_MC_END();
5832 }
5833 }
5834 else
5835 {
5836 /**
5837 * @opdone
5838 * @opcode 0x7e
5839 * @opcodesub rex.w=0
5840 * @oppfx none
5841 * @opcpuid mmx
5842 * @opgroup og_mmx_datamove
5843 * @opxcpttype 5
5844 * @opfunction iemOp_movd_q_Ey_Pd
5845 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5846 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5847 */
5848 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5849 if (IEM_IS_MODRM_REG_MODE(bRm))
5850 {
5851 /* greg32, MMX */
5852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5853 IEM_MC_BEGIN(0, 1);
5854 IEM_MC_LOCAL(uint32_t, u32Tmp);
5855
5856 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5857 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5858
5859 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
5860 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5861 IEM_MC_FPU_TO_MMX_MODE();
5862
5863 IEM_MC_ADVANCE_RIP();
5864 IEM_MC_END();
5865 }
5866 else
5867 {
5868 /* [mem32], MMX */
5869 IEM_MC_BEGIN(0, 2);
5870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5871 IEM_MC_LOCAL(uint32_t, u32Tmp);
5872
5873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5875 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5876 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5877
5878 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
5879 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5880 IEM_MC_FPU_TO_MMX_MODE();
5881
5882 IEM_MC_ADVANCE_RIP();
5883 IEM_MC_END();
5884 }
5885 }
5886 return VINF_SUCCESS;
5888}
5889
5890
5891FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
5892{
5893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5894 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5895 {
5896 /**
5897 * @opcode 0x7e
5898 * @opcodesub rex.w=1
5899 * @oppfx 0x66
5900 * @opcpuid sse2
5901 * @opgroup og_sse2_simdint_datamove
5902 * @opxcpttype 5
5903 * @optest 64-bit / op1=1 op2=2 -> op1=2
5904 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
5905 */
5906 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5907 if (IEM_IS_MODRM_REG_MODE(bRm))
5908 {
5909 /* greg64, XMM */
5910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5911 IEM_MC_BEGIN(0, 1);
5912 IEM_MC_LOCAL(uint64_t, u64Tmp);
5913
5914 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5915 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5916
5917 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5918 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5919
5920 IEM_MC_ADVANCE_RIP();
5921 IEM_MC_END();
5922 }
5923 else
5924 {
5925 /* [mem64], XMM */
5926 IEM_MC_BEGIN(0, 2);
5927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5928 IEM_MC_LOCAL(uint64_t, u64Tmp);
5929
5930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5932 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5933 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5934
5935 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5936 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5937
5938 IEM_MC_ADVANCE_RIP();
5939 IEM_MC_END();
5940 }
5941 }
5942 else
5943 {
5944 /**
5945 * @opdone
5946 * @opcode 0x7e
5947 * @opcodesub rex.w=0
5948 * @oppfx 0x66
5949 * @opcpuid sse2
5950 * @opgroup og_sse2_simdint_datamove
5951 * @opxcpttype 5
5952 * @opfunction iemOp_movd_q_Ey_Vy
5953 * @optest op1=1 op2=2 -> op1=2
5954 * @optest op1=0 op2=-42 -> op1=-42
5955 */
5956 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5957 if (IEM_IS_MODRM_REG_MODE(bRm))
5958 {
5959 /* greg32, XMM */
5960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5961 IEM_MC_BEGIN(0, 1);
5962 IEM_MC_LOCAL(uint32_t, u32Tmp);
5963
5964 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5965 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5966
5967 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5968 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5969
5970 IEM_MC_ADVANCE_RIP();
5971 IEM_MC_END();
5972 }
5973 else
5974 {
5975 /* [mem32], XMM */
5976 IEM_MC_BEGIN(0, 2);
5977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5978 IEM_MC_LOCAL(uint32_t, u32Tmp);
5979
5980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5982 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5983 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5984
5985 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5986 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5987
5988 IEM_MC_ADVANCE_RIP();
5989 IEM_MC_END();
5990 }
5991 }
5992 return VINF_SUCCESS;
5994}
5995
5996/**
5997 * @opcode 0x7e
5998 * @oppfx 0xf3
5999 * @opcpuid sse2
6000 * @opgroup og_sse2_pcksclr_datamove
6001 * @opxcpttype none
6002 * @optest op1=1 op2=2 -> op1=2
6003 * @optest op1=0 op2=-42 -> op1=-42
6004 */
6005FNIEMOP_DEF(iemOp_movq_Vq_Wq)
6006{
6007 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
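    /* Both paths zero bits 127:64 of the destination (the Zx in VqZx_WO):
       IEM_MC_STORE_XREG_U64_ZX_U128 writes the low qword and clears the
       high qword. */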
6009 if (IEM_IS_MODRM_REG_MODE(bRm))
6010 {
6011 /*
6012 * Register, register.
6013 */
6014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6015 IEM_MC_BEGIN(0, 2);
6016 IEM_MC_LOCAL(uint64_t, uSrc);
6017
6018 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6019 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6020
6021 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6022 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
6023
6024 IEM_MC_ADVANCE_RIP();
6025 IEM_MC_END();
6026 }
6027 else
6028 {
6029 /*
6030 * Register, memory.
6031 */
6032 IEM_MC_BEGIN(0, 2);
6033 IEM_MC_LOCAL(uint64_t, uSrc);
6034 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6035
6036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6038 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6039 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6040
6041 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6042 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
6043
6044 IEM_MC_ADVANCE_RIP();
6045 IEM_MC_END();
6046 }
6047 return VINF_SUCCESS;
6048}
6049
6050/* Opcode 0xf2 0x0f 0x7e - invalid */
6051
6052
6053/** Opcode 0x0f 0x7f - movq Qq, Pq */
6054FNIEMOP_DEF(iemOp_movq_Qq_Pq)
6055{
6056 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
6057 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6058 if (IEM_IS_MODRM_REG_MODE(bRm))
6059 {
6060 /*
6061 * Register, register.
6062 */
6063 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
6064 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
6065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6066 IEM_MC_BEGIN(0, 1);
6067 IEM_MC_LOCAL(uint64_t, u64Tmp);
6068 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6069 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6070 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
6071 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
6072 IEM_MC_FPU_TO_MMX_MODE();
6073 IEM_MC_ADVANCE_RIP();
6074 IEM_MC_END();
6075 }
6076 else
6077 {
6078 /*
6079 * Memory, register.
6080 */
6081 IEM_MC_BEGIN(0, 2);
6082 IEM_MC_LOCAL(uint64_t, u64Tmp);
6083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6084
6085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6087 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6088 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6089
6090 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
6091 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
6092 IEM_MC_FPU_TO_MMX_MODE();
6093
6094 IEM_MC_ADVANCE_RIP();
6095 IEM_MC_END();
6096 }
6097 return VINF_SUCCESS;
6098}
6099
6100/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
6101FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
6102{
6103 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6105 if (IEM_IS_MODRM_REG_MODE(bRm))
6106 {
6107 /*
6108 * Register, register.
6109 */
6110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6111 IEM_MC_BEGIN(0, 0);
6112 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6113 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6114 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
6115 IEM_GET_MODRM_REG(pVCpu, bRm));
6116 IEM_MC_ADVANCE_RIP();
6117 IEM_MC_END();
6118 }
6119 else
6120 {
6121 /*
6122 * Memory, register.
6123 */
6124 IEM_MC_BEGIN(0, 2);
6125 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6127
6128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6130 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6131 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6132
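        /* movdqa requires 16-byte alignment: the _ALIGN_SSE store below
           raises #GP(0) if the effective address is misaligned. */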
6133 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
6134 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
6135
6136 IEM_MC_ADVANCE_RIP();
6137 IEM_MC_END();
6138 }
6139 return VINF_SUCCESS;
6140}
6141
6142/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
6143FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
6144{
6145 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6147 if (IEM_IS_MODRM_REG_MODE(bRm))
6148 {
6149 /*
6150 * Register, register.
6151 */
6152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6153 IEM_MC_BEGIN(0, 0);
6154 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6155 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6156 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
6157 IEM_GET_MODRM_REG(pVCpu, bRm));
6158 IEM_MC_ADVANCE_RIP();
6159 IEM_MC_END();
6160 }
6161 else
6162 {
6163 /*
6164 * Memory, register.
6165 */
6166 IEM_MC_BEGIN(0, 2);
6167 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6169
6170 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6172 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6173 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
6174
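        /* Unlike movdqa above, movdqu accepts any alignment, hence the
           plain IEM_MC_STORE_MEM_U128 store. */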
6175 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
6176 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
6177
6178 IEM_MC_ADVANCE_RIP();
6179 IEM_MC_END();
6180 }
6181 return VINF_SUCCESS;
6182}
6183
6184/* Opcode 0xf2 0x0f 0x7f - invalid */
6185
6186
6187
6188/** Opcode 0x0f 0x80. */
6189FNIEMOP_DEF(iemOp_jo_Jv)
6190{
6191 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
6192 IEMOP_HLP_MIN_386();
6193 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
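    /* Jv: the displacement is a sign-extended 16- or 32-bit immediate taken
       relative to the end of the instruction; in 64-bit mode the operand
       size defaults to 64 bits (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE above).
       The same 16/32-bit split below repeats for all 0F 8x conditional
       jumps. */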
6194 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6195 {
6196 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6198
6199 IEM_MC_BEGIN(0, 0);
6200 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6201 IEM_MC_REL_JMP_S16(i16Imm);
6202 } IEM_MC_ELSE() {
6203 IEM_MC_ADVANCE_RIP();
6204 } IEM_MC_ENDIF();
6205 IEM_MC_END();
6206 }
6207 else
6208 {
6209 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6211
6212 IEM_MC_BEGIN(0, 0);
6213 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6214 IEM_MC_REL_JMP_S32(i32Imm);
6215 } IEM_MC_ELSE() {
6216 IEM_MC_ADVANCE_RIP();
6217 } IEM_MC_ENDIF();
6218 IEM_MC_END();
6219 }
6220 return VINF_SUCCESS;
6221}
6222
6223
6224/** Opcode 0x0f 0x81. */
6225FNIEMOP_DEF(iemOp_jno_Jv)
6226{
6227 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
6228 IEMOP_HLP_MIN_386();
6229 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6230 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6231 {
6232 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6234
6235 IEM_MC_BEGIN(0, 0);
6236 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6237 IEM_MC_ADVANCE_RIP();
6238 } IEM_MC_ELSE() {
6239 IEM_MC_REL_JMP_S16(i16Imm);
6240 } IEM_MC_ENDIF();
6241 IEM_MC_END();
6242 }
6243 else
6244 {
6245 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6247
6248 IEM_MC_BEGIN(0, 0);
6249 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6250 IEM_MC_ADVANCE_RIP();
6251 } IEM_MC_ELSE() {
6252 IEM_MC_REL_JMP_S32(i32Imm);
6253 } IEM_MC_ENDIF();
6254 IEM_MC_END();
6255 }
6256 return VINF_SUCCESS;
6257}
6258
6259
6260/** Opcode 0x0f 0x82. */
6261FNIEMOP_DEF(iemOp_jc_Jv)
6262{
6263 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
6264 IEMOP_HLP_MIN_386();
6265 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6266 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6267 {
6268 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6270
6271 IEM_MC_BEGIN(0, 0);
6272 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6273 IEM_MC_REL_JMP_S16(i16Imm);
6274 } IEM_MC_ELSE() {
6275 IEM_MC_ADVANCE_RIP();
6276 } IEM_MC_ENDIF();
6277 IEM_MC_END();
6278 }
6279 else
6280 {
6281 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6283
6284 IEM_MC_BEGIN(0, 0);
6285 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6286 IEM_MC_REL_JMP_S32(i32Imm);
6287 } IEM_MC_ELSE() {
6288 IEM_MC_ADVANCE_RIP();
6289 } IEM_MC_ENDIF();
6290 IEM_MC_END();
6291 }
6292 return VINF_SUCCESS;
6293}
6294
6295
6296/** Opcode 0x0f 0x83. */
6297FNIEMOP_DEF(iemOp_jnc_Jv)
6298{
6299 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
6300 IEMOP_HLP_MIN_386();
6301 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6302 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6303 {
6304 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6306
6307 IEM_MC_BEGIN(0, 0);
6308 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6309 IEM_MC_ADVANCE_RIP();
6310 } IEM_MC_ELSE() {
6311 IEM_MC_REL_JMP_S16(i16Imm);
6312 } IEM_MC_ENDIF();
6313 IEM_MC_END();
6314 }
6315 else
6316 {
6317 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6319
6320 IEM_MC_BEGIN(0, 0);
6321 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6322 IEM_MC_ADVANCE_RIP();
6323 } IEM_MC_ELSE() {
6324 IEM_MC_REL_JMP_S32(i32Imm);
6325 } IEM_MC_ENDIF();
6326 IEM_MC_END();
6327 }
6328 return VINF_SUCCESS;
6329}
6330
6331
6332/** Opcode 0x0f 0x84. */
6333FNIEMOP_DEF(iemOp_je_Jv)
6334{
6335 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
6336 IEMOP_HLP_MIN_386();
6337 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6338 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6339 {
6340 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6342
6343 IEM_MC_BEGIN(0, 0);
6344 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6345 IEM_MC_REL_JMP_S16(i16Imm);
6346 } IEM_MC_ELSE() {
6347 IEM_MC_ADVANCE_RIP();
6348 } IEM_MC_ENDIF();
6349 IEM_MC_END();
6350 }
6351 else
6352 {
6353 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6355
6356 IEM_MC_BEGIN(0, 0);
6357 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6358 IEM_MC_REL_JMP_S32(i32Imm);
6359 } IEM_MC_ELSE() {
6360 IEM_MC_ADVANCE_RIP();
6361 } IEM_MC_ENDIF();
6362 IEM_MC_END();
6363 }
6364 return VINF_SUCCESS;
6365}
6366
6367
6368/** Opcode 0x0f 0x85. */
6369FNIEMOP_DEF(iemOp_jne_Jv)
6370{
6371 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
6372 IEMOP_HLP_MIN_386();
6373 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6374 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6375 {
6376 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6378
6379 IEM_MC_BEGIN(0, 0);
6380 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6381 IEM_MC_ADVANCE_RIP();
6382 } IEM_MC_ELSE() {
6383 IEM_MC_REL_JMP_S16(i16Imm);
6384 } IEM_MC_ENDIF();
6385 IEM_MC_END();
6386 }
6387 else
6388 {
6389 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6391
6392 IEM_MC_BEGIN(0, 0);
6393 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6394 IEM_MC_ADVANCE_RIP();
6395 } IEM_MC_ELSE() {
6396 IEM_MC_REL_JMP_S32(i32Imm);
6397 } IEM_MC_ENDIF();
6398 IEM_MC_END();
6399 }
6400 return VINF_SUCCESS;
6401}
6402
6403
6404/** Opcode 0x0f 0x86. */
6405FNIEMOP_DEF(iemOp_jbe_Jv)
6406{
6407 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
6408 IEMOP_HLP_MIN_386();
6409 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
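    /* Taken when CF=1 or ZF=1, i.e. unsigned 'below or equal'. */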
6410 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6411 {
6412 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6414
6415 IEM_MC_BEGIN(0, 0);
6416 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6417 IEM_MC_REL_JMP_S16(i16Imm);
6418 } IEM_MC_ELSE() {
6419 IEM_MC_ADVANCE_RIP();
6420 } IEM_MC_ENDIF();
6421 IEM_MC_END();
6422 }
6423 else
6424 {
6425 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6427
6428 IEM_MC_BEGIN(0, 0);
6429 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6430 IEM_MC_REL_JMP_S32(i32Imm);
6431 } IEM_MC_ELSE() {
6432 IEM_MC_ADVANCE_RIP();
6433 } IEM_MC_ENDIF();
6434 IEM_MC_END();
6435 }
6436 return VINF_SUCCESS;
6437}
6438
6439
6440/** Opcode 0x0f 0x87. */
6441FNIEMOP_DEF(iemOp_jnbe_Jv)
6442{
6443 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
6444 IEMOP_HLP_MIN_386();
6445 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6446 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6447 {
6448 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6450
6451 IEM_MC_BEGIN(0, 0);
6452 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6453 IEM_MC_ADVANCE_RIP();
6454 } IEM_MC_ELSE() {
6455 IEM_MC_REL_JMP_S16(i16Imm);
6456 } IEM_MC_ENDIF();
6457 IEM_MC_END();
6458 }
6459 else
6460 {
6461 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6463
6464 IEM_MC_BEGIN(0, 0);
6465 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6466 IEM_MC_ADVANCE_RIP();
6467 } IEM_MC_ELSE() {
6468 IEM_MC_REL_JMP_S32(i32Imm);
6469 } IEM_MC_ENDIF();
6470 IEM_MC_END();
6471 }
6472 return VINF_SUCCESS;
6473}
6474
6475
6476/** Opcode 0x0f 0x88. */
6477FNIEMOP_DEF(iemOp_js_Jv)
6478{
6479 IEMOP_MNEMONIC(js_Jv, "js Jv");
6480 IEMOP_HLP_MIN_386();
6481 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6482 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6483 {
6484 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6486
6487 IEM_MC_BEGIN(0, 0);
6488 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6489 IEM_MC_REL_JMP_S16(i16Imm);
6490 } IEM_MC_ELSE() {
6491 IEM_MC_ADVANCE_RIP();
6492 } IEM_MC_ENDIF();
6493 IEM_MC_END();
6494 }
6495 else
6496 {
6497 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6499
6500 IEM_MC_BEGIN(0, 0);
6501 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6502 IEM_MC_REL_JMP_S32(i32Imm);
6503 } IEM_MC_ELSE() {
6504 IEM_MC_ADVANCE_RIP();
6505 } IEM_MC_ENDIF();
6506 IEM_MC_END();
6507 }
6508 return VINF_SUCCESS;
6509}
6510
6511
6512/** Opcode 0x0f 0x89. */
6513FNIEMOP_DEF(iemOp_jns_Jv)
6514{
6515 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
6516 IEMOP_HLP_MIN_386();
6517 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6518 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6519 {
6520 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6522
6523 IEM_MC_BEGIN(0, 0);
6524 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6525 IEM_MC_ADVANCE_RIP();
6526 } IEM_MC_ELSE() {
6527 IEM_MC_REL_JMP_S16(i16Imm);
6528 } IEM_MC_ENDIF();
6529 IEM_MC_END();
6530 }
6531 else
6532 {
6533 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6535
6536 IEM_MC_BEGIN(0, 0);
6537 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6538 IEM_MC_ADVANCE_RIP();
6539 } IEM_MC_ELSE() {
6540 IEM_MC_REL_JMP_S32(i32Imm);
6541 } IEM_MC_ENDIF();
6542 IEM_MC_END();
6543 }
6544 return VINF_SUCCESS;
6545}
6546
6547
6548/** Opcode 0x0f 0x8a. */
6549FNIEMOP_DEF(iemOp_jp_Jv)
6550{
6551 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
6552 IEMOP_HLP_MIN_386();
6553 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6554 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6555 {
6556 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6558
6559 IEM_MC_BEGIN(0, 0);
6560 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6561 IEM_MC_REL_JMP_S16(i16Imm);
6562 } IEM_MC_ELSE() {
6563 IEM_MC_ADVANCE_RIP();
6564 } IEM_MC_ENDIF();
6565 IEM_MC_END();
6566 }
6567 else
6568 {
6569 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6571
6572 IEM_MC_BEGIN(0, 0);
6573 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6574 IEM_MC_REL_JMP_S32(i32Imm);
6575 } IEM_MC_ELSE() {
6576 IEM_MC_ADVANCE_RIP();
6577 } IEM_MC_ENDIF();
6578 IEM_MC_END();
6579 }
6580 return VINF_SUCCESS;
6581}
6582
6583
6584/** Opcode 0x0f 0x8b. */
6585FNIEMOP_DEF(iemOp_jnp_Jv)
6586{
6587 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
6588 IEMOP_HLP_MIN_386();
6589 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6590 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6591 {
6592 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6594
6595 IEM_MC_BEGIN(0, 0);
6596 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6597 IEM_MC_ADVANCE_RIP();
6598 } IEM_MC_ELSE() {
6599 IEM_MC_REL_JMP_S16(i16Imm);
6600 } IEM_MC_ENDIF();
6601 IEM_MC_END();
6602 }
6603 else
6604 {
6605 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6607
6608 IEM_MC_BEGIN(0, 0);
6609 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6610 IEM_MC_ADVANCE_RIP();
6611 } IEM_MC_ELSE() {
6612 IEM_MC_REL_JMP_S32(i32Imm);
6613 } IEM_MC_ENDIF();
6614 IEM_MC_END();
6615 }
6616 return VINF_SUCCESS;
6617}
6618
6619
6620/** Opcode 0x0f 0x8c. */
6621FNIEMOP_DEF(iemOp_jl_Jv)
6622{
6623 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
6624 IEMOP_HLP_MIN_386();
6625 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
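    /* Taken when SF != OF, i.e. signed 'less'. */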
6626 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6627 {
6628 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6630
6631 IEM_MC_BEGIN(0, 0);
6632 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6633 IEM_MC_REL_JMP_S16(i16Imm);
6634 } IEM_MC_ELSE() {
6635 IEM_MC_ADVANCE_RIP();
6636 } IEM_MC_ENDIF();
6637 IEM_MC_END();
6638 }
6639 else
6640 {
6641 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6643
6644 IEM_MC_BEGIN(0, 0);
6645 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6646 IEM_MC_REL_JMP_S32(i32Imm);
6647 } IEM_MC_ELSE() {
6648 IEM_MC_ADVANCE_RIP();
6649 } IEM_MC_ENDIF();
6650 IEM_MC_END();
6651 }
6652 return VINF_SUCCESS;
6653}
6654
6655
6656/** Opcode 0x0f 0x8d. */
6657FNIEMOP_DEF(iemOp_jnl_Jv)
6658{
6659 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
6660 IEMOP_HLP_MIN_386();
6661 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6662 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6663 {
6664 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6666
6667 IEM_MC_BEGIN(0, 0);
6668 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6669 IEM_MC_ADVANCE_RIP();
6670 } IEM_MC_ELSE() {
6671 IEM_MC_REL_JMP_S16(i16Imm);
6672 } IEM_MC_ENDIF();
6673 IEM_MC_END();
6674 }
6675 else
6676 {
6677 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6679
6680 IEM_MC_BEGIN(0, 0);
6681 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6682 IEM_MC_ADVANCE_RIP();
6683 } IEM_MC_ELSE() {
6684 IEM_MC_REL_JMP_S32(i32Imm);
6685 } IEM_MC_ENDIF();
6686 IEM_MC_END();
6687 }
6688 return VINF_SUCCESS;
6689}
6690
6691
6692/** Opcode 0x0f 0x8e. */
6693FNIEMOP_DEF(iemOp_jle_Jv)
6694{
6695 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
6696 IEMOP_HLP_MIN_386();
6697 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
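    /* Taken when ZF=1 or SF != OF, i.e. signed 'less or equal'. */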
6698 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6699 {
6700 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6702
6703 IEM_MC_BEGIN(0, 0);
6704 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6705 IEM_MC_REL_JMP_S16(i16Imm);
6706 } IEM_MC_ELSE() {
6707 IEM_MC_ADVANCE_RIP();
6708 } IEM_MC_ENDIF();
6709 IEM_MC_END();
6710 }
6711 else
6712 {
6713 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6715
6716 IEM_MC_BEGIN(0, 0);
6717 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6718 IEM_MC_REL_JMP_S32(i32Imm);
6719 } IEM_MC_ELSE() {
6720 IEM_MC_ADVANCE_RIP();
6721 } IEM_MC_ENDIF();
6722 IEM_MC_END();
6723 }
6724 return VINF_SUCCESS;
6725}
6726
6727
6728/** Opcode 0x0f 0x8f. */
6729FNIEMOP_DEF(iemOp_jnle_Jv)
6730{
6731 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
6732 IEMOP_HLP_MIN_386();
6733 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6734 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6735 {
6736 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6738
6739 IEM_MC_BEGIN(0, 0);
6740 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6741 IEM_MC_ADVANCE_RIP();
6742 } IEM_MC_ELSE() {
6743 IEM_MC_REL_JMP_S16(i16Imm);
6744 } IEM_MC_ENDIF();
6745 IEM_MC_END();
6746 }
6747 else
6748 {
6749 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6751
6752 IEM_MC_BEGIN(0, 0);
6753 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6754 IEM_MC_ADVANCE_RIP();
6755 } IEM_MC_ELSE() {
6756 IEM_MC_REL_JMP_S32(i32Imm);
6757 } IEM_MC_ENDIF();
6758 IEM_MC_END();
6759 }
6760 return VINF_SUCCESS;
6761}
6762
6763
6764/** Opcode 0x0f 0x90. */
6765FNIEMOP_DEF(iemOp_seto_Eb)
6766{
6767 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
6768 IEMOP_HLP_MIN_386();
6769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
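    /* All the 0F 9x setcc forms below share this pattern: store 1 or 0 into
       the byte-sized r/m operand according to the tested flags. The operand
       is always 8 bits, so operand-size prefixes are irrelevant here. */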
6770
6771 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6772 * any way. AMD says it's "unused", whatever that means. We're
6773 * ignoring for now. */
6774 if (IEM_IS_MODRM_REG_MODE(bRm))
6775 {
6776 /* register target */
6777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6778 IEM_MC_BEGIN(0, 0);
6779 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6780 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6781 } IEM_MC_ELSE() {
6782 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6783 } IEM_MC_ENDIF();
6784 IEM_MC_ADVANCE_RIP();
6785 IEM_MC_END();
6786 }
6787 else
6788 {
6789 /* memory target */
6790 IEM_MC_BEGIN(0, 1);
6791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6794 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6795 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6796 } IEM_MC_ELSE() {
6797 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6798 } IEM_MC_ENDIF();
6799 IEM_MC_ADVANCE_RIP();
6800 IEM_MC_END();
6801 }
6802 return VINF_SUCCESS;
6803}
6804
6805
6806/** Opcode 0x0f 0x91. */
6807FNIEMOP_DEF(iemOp_setno_Eb)
6808{
6809 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
6810 IEMOP_HLP_MIN_386();
6811 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6812
6813 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6814 * any way. AMD says it's "unused", whatever that means. We're
6815 * ignoring for now. */
6816 if (IEM_IS_MODRM_REG_MODE(bRm))
6817 {
6818 /* register target */
6819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6820 IEM_MC_BEGIN(0, 0);
6821 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6822 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6823 } IEM_MC_ELSE() {
6824 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6825 } IEM_MC_ENDIF();
6826 IEM_MC_ADVANCE_RIP();
6827 IEM_MC_END();
6828 }
6829 else
6830 {
6831 /* memory target */
6832 IEM_MC_BEGIN(0, 1);
6833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6836 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6837 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6838 } IEM_MC_ELSE() {
6839 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6840 } IEM_MC_ENDIF();
6841 IEM_MC_ADVANCE_RIP();
6842 IEM_MC_END();
6843 }
6844 return VINF_SUCCESS;
6845}
6846
6847
6848/** Opcode 0x0f 0x92. */
6849FNIEMOP_DEF(iemOp_setc_Eb)
6850{
6851 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
6852 IEMOP_HLP_MIN_386();
6853 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6854
6855 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6856 * any way. AMD says it's "unused", whatever that means. We're
6857 * ignoring for now. */
6858 if (IEM_IS_MODRM_REG_MODE(bRm))
6859 {
6860 /* register target */
6861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6862 IEM_MC_BEGIN(0, 0);
6863 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6864 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6865 } IEM_MC_ELSE() {
6866 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6867 } IEM_MC_ENDIF();
6868 IEM_MC_ADVANCE_RIP();
6869 IEM_MC_END();
6870 }
6871 else
6872 {
6873 /* memory target */
6874 IEM_MC_BEGIN(0, 1);
6875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6878 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6879 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6880 } IEM_MC_ELSE() {
6881 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6882 } IEM_MC_ENDIF();
6883 IEM_MC_ADVANCE_RIP();
6884 IEM_MC_END();
6885 }
6886 return VINF_SUCCESS;
6887}
6888
6889
6890/** Opcode 0x0f 0x93. */
6891FNIEMOP_DEF(iemOp_setnc_Eb)
6892{
6893 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
6894 IEMOP_HLP_MIN_386();
6895 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6896
6897 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6898 * any way. AMD says it's "unused", whatever that means. We're
6899 * ignoring for now. */
6900 if (IEM_IS_MODRM_REG_MODE(bRm))
6901 {
6902 /* register target */
6903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6904 IEM_MC_BEGIN(0, 0);
6905 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6906 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6907 } IEM_MC_ELSE() {
6908 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6909 } IEM_MC_ENDIF();
6910 IEM_MC_ADVANCE_RIP();
6911 IEM_MC_END();
6912 }
6913 else
6914 {
6915 /* memory target */
6916 IEM_MC_BEGIN(0, 1);
6917 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6920 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6921 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6922 } IEM_MC_ELSE() {
6923 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6924 } IEM_MC_ENDIF();
6925 IEM_MC_ADVANCE_RIP();
6926 IEM_MC_END();
6927 }
6928 return VINF_SUCCESS;
6929}
6930
6931
6932/** Opcode 0x0f 0x94. */
6933FNIEMOP_DEF(iemOp_sete_Eb)
6934{
6935 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
6936 IEMOP_HLP_MIN_386();
6937 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6938
6939 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6940 * any way. AMD says it's "unused", whatever that means. We're
6941 * ignoring for now. */
6942 if (IEM_IS_MODRM_REG_MODE(bRm))
6943 {
6944 /* register target */
6945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6946 IEM_MC_BEGIN(0, 0);
6947 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6948 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6949 } IEM_MC_ELSE() {
6950 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6951 } IEM_MC_ENDIF();
6952 IEM_MC_ADVANCE_RIP();
6953 IEM_MC_END();
6954 }
6955 else
6956 {
6957 /* memory target */
6958 IEM_MC_BEGIN(0, 1);
6959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6960 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6962 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6963 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6964 } IEM_MC_ELSE() {
6965 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6966 } IEM_MC_ENDIF();
6967 IEM_MC_ADVANCE_RIP();
6968 IEM_MC_END();
6969 }
6970 return VINF_SUCCESS;
6971}
6972
6973
6974/** Opcode 0x0f 0x95. */
6975FNIEMOP_DEF(iemOp_setne_Eb)
6976{
6977 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
6978 IEMOP_HLP_MIN_386();
6979 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6980
6981 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6982 * any way. AMD says it's "unused", whatever that means. We're
6983 * ignoring for now. */
6984 if (IEM_IS_MODRM_REG_MODE(bRm))
6985 {
6986 /* register target */
6987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6988 IEM_MC_BEGIN(0, 0);
6989 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6990 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6991 } IEM_MC_ELSE() {
6992 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6993 } IEM_MC_ENDIF();
6994 IEM_MC_ADVANCE_RIP();
6995 IEM_MC_END();
6996 }
6997 else
6998 {
6999 /* memory target */
7000 IEM_MC_BEGIN(0, 1);
7001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7004 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7005 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7006 } IEM_MC_ELSE() {
7007 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7008 } IEM_MC_ENDIF();
7009 IEM_MC_ADVANCE_RIP();
7010 IEM_MC_END();
7011 }
7012 return VINF_SUCCESS;
7013}
7014
7015
7016/** Opcode 0x0f 0x96. */
7017FNIEMOP_DEF(iemOp_setbe_Eb)
7018{
7019 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
7020 IEMOP_HLP_MIN_386();
7021 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7022
7023 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7024 * any way. AMD says it's "unused", whatever that means. We're
7025 * ignoring for now. */
7026 if (IEM_IS_MODRM_REG_MODE(bRm))
7027 {
7028 /* register target */
7029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7030 IEM_MC_BEGIN(0, 0);
7031 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7032 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7033 } IEM_MC_ELSE() {
7034 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7035 } IEM_MC_ENDIF();
7036 IEM_MC_ADVANCE_RIP();
7037 IEM_MC_END();
7038 }
7039 else
7040 {
7041 /* memory target */
7042 IEM_MC_BEGIN(0, 1);
7043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7044 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7046 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7047 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7048 } IEM_MC_ELSE() {
7049 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7050 } IEM_MC_ENDIF();
7051 IEM_MC_ADVANCE_RIP();
7052 IEM_MC_END();
7053 }
7054 return VINF_SUCCESS;
7055}
7056
7057
7058/** Opcode 0x0f 0x97. */
7059FNIEMOP_DEF(iemOp_setnbe_Eb)
7060{
7061 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
7062 IEMOP_HLP_MIN_386();
7063 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7064
7065 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7066 * any way. AMD says it's "unused", whatever that means. We're
7067 * ignoring for now. */
7068 if (IEM_IS_MODRM_REG_MODE(bRm))
7069 {
7070 /* register target */
7071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7072 IEM_MC_BEGIN(0, 0);
7073 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7074 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7075 } IEM_MC_ELSE() {
7076 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7077 } IEM_MC_ENDIF();
7078 IEM_MC_ADVANCE_RIP();
7079 IEM_MC_END();
7080 }
7081 else
7082 {
7083 /* memory target */
7084 IEM_MC_BEGIN(0, 1);
7085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7086 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7088 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7089 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7090 } IEM_MC_ELSE() {
7091 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7092 } IEM_MC_ENDIF();
7093 IEM_MC_ADVANCE_RIP();
7094 IEM_MC_END();
7095 }
7096 return VINF_SUCCESS;
7097}
7098
7099
7100/** Opcode 0x0f 0x98. */
7101FNIEMOP_DEF(iemOp_sets_Eb)
7102{
7103 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
7104 IEMOP_HLP_MIN_386();
7105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7106
7107 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7108 * any way. AMD says it's "unused", whatever that means. We're
7109 * ignoring for now. */
7110 if (IEM_IS_MODRM_REG_MODE(bRm))
7111 {
7112 /* register target */
7113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7114 IEM_MC_BEGIN(0, 0);
7115 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7116 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7117 } IEM_MC_ELSE() {
7118 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7119 } IEM_MC_ENDIF();
7120 IEM_MC_ADVANCE_RIP();
7121 IEM_MC_END();
7122 }
7123 else
7124 {
7125 /* memory target */
7126 IEM_MC_BEGIN(0, 1);
7127 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7130 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7131 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7132 } IEM_MC_ELSE() {
7133 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7134 } IEM_MC_ENDIF();
7135 IEM_MC_ADVANCE_RIP();
7136 IEM_MC_END();
7137 }
7138 return VINF_SUCCESS;
7139}
7140
7141
7142/** Opcode 0x0f 0x99. */
7143FNIEMOP_DEF(iemOp_setns_Eb)
7144{
7145 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
7146 IEMOP_HLP_MIN_386();
7147 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7148
7149 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7150 * any way. AMD says it's "unused", whatever that means. We're
7151 * ignoring for now. */
7152 if (IEM_IS_MODRM_REG_MODE(bRm))
7153 {
7154 /* register target */
7155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7156 IEM_MC_BEGIN(0, 0);
7157 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7158 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7159 } IEM_MC_ELSE() {
7160 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7161 } IEM_MC_ENDIF();
7162 IEM_MC_ADVANCE_RIP();
7163 IEM_MC_END();
7164 }
7165 else
7166 {
7167 /* memory target */
7168 IEM_MC_BEGIN(0, 1);
7169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7170 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7172 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7173 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7174 } IEM_MC_ELSE() {
7175 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7176 } IEM_MC_ENDIF();
7177 IEM_MC_ADVANCE_RIP();
7178 IEM_MC_END();
7179 }
7180 return VINF_SUCCESS;
7181}
7182
7183
7184/** Opcode 0x0f 0x9a. */
7185FNIEMOP_DEF(iemOp_setp_Eb)
7186{
7187 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
7188 IEMOP_HLP_MIN_386();
7189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7190
7191 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7192 * any way. AMD says it's "unused", whatever that means. We're
7193 * ignoring for now. */
7194 if (IEM_IS_MODRM_REG_MODE(bRm))
7195 {
7196 /* register target */
7197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7198 IEM_MC_BEGIN(0, 0);
7199 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7200 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7201 } IEM_MC_ELSE() {
7202 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7203 } IEM_MC_ENDIF();
7204 IEM_MC_ADVANCE_RIP();
7205 IEM_MC_END();
7206 }
7207 else
7208 {
7209 /* memory target */
7210 IEM_MC_BEGIN(0, 1);
7211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7214 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7215 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7216 } IEM_MC_ELSE() {
7217 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7218 } IEM_MC_ENDIF();
7219 IEM_MC_ADVANCE_RIP();
7220 IEM_MC_END();
7221 }
7222 return VINF_SUCCESS;
7223}
7224
7225
7226/** Opcode 0x0f 0x9b. */
7227FNIEMOP_DEF(iemOp_setnp_Eb)
7228{
7229 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
7230 IEMOP_HLP_MIN_386();
7231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7232
7233 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7234 * any way. AMD says it's "unused", whatever that means. We're
7235 * ignoring for now. */
7236 if (IEM_IS_MODRM_REG_MODE(bRm))
7237 {
7238 /* register target */
7239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7240 IEM_MC_BEGIN(0, 0);
7241 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7242 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7243 } IEM_MC_ELSE() {
7244 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7245 } IEM_MC_ENDIF();
7246 IEM_MC_ADVANCE_RIP();
7247 IEM_MC_END();
7248 }
7249 else
7250 {
7251 /* memory target */
7252 IEM_MC_BEGIN(0, 1);
7253 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7256 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7257 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7258 } IEM_MC_ELSE() {
7259 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7260 } IEM_MC_ENDIF();
7261 IEM_MC_ADVANCE_RIP();
7262 IEM_MC_END();
7263 }
7264 return VINF_SUCCESS;
7265}
7266
7267
7268/** Opcode 0x0f 0x9c. */
7269FNIEMOP_DEF(iemOp_setl_Eb)
7270{
7271 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
7272 IEMOP_HLP_MIN_386();
7273 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7274
7275 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7276 * any way. AMD says it's "unused", whatever that means. We're
7277 * ignoring for now. */
7278 if (IEM_IS_MODRM_REG_MODE(bRm))
7279 {
7280 /* register target */
7281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7282 IEM_MC_BEGIN(0, 0);
7283 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7284 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7285 } IEM_MC_ELSE() {
7286 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7287 } IEM_MC_ENDIF();
7288 IEM_MC_ADVANCE_RIP();
7289 IEM_MC_END();
7290 }
7291 else
7292 {
7293 /* memory target */
7294 IEM_MC_BEGIN(0, 1);
7295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7298 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7299 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7300 } IEM_MC_ELSE() {
7301 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7302 } IEM_MC_ENDIF();
7303 IEM_MC_ADVANCE_RIP();
7304 IEM_MC_END();
7305 }
7306 return VINF_SUCCESS;
7307}
7308
7309
7310/** Opcode 0x0f 0x9d. */
7311FNIEMOP_DEF(iemOp_setnl_Eb)
7312{
7313 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
7314 IEMOP_HLP_MIN_386();
7315 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7316
7317 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7318 * any way. AMD says it's "unused", whatever that means. We're
7319 * ignoring for now. */
7320 if (IEM_IS_MODRM_REG_MODE(bRm))
7321 {
7322 /* register target */
7323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7324 IEM_MC_BEGIN(0, 0);
7325 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7326 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7327 } IEM_MC_ELSE() {
7328 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7329 } IEM_MC_ENDIF();
7330 IEM_MC_ADVANCE_RIP();
7331 IEM_MC_END();
7332 }
7333 else
7334 {
7335 /* memory target */
7336 IEM_MC_BEGIN(0, 1);
7337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7340 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7341 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7342 } IEM_MC_ELSE() {
7343 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7344 } IEM_MC_ENDIF();
7345 IEM_MC_ADVANCE_RIP();
7346 IEM_MC_END();
7347 }
7348 return VINF_SUCCESS;
7349}
7350
7351
7352/** Opcode 0x0f 0x9e. */
7353FNIEMOP_DEF(iemOp_setle_Eb)
7354{
7355 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
7356 IEMOP_HLP_MIN_386();
7357 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7358
7359 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7360 * any way. AMD says it's "unused", whatever that means. We're
7361 * ignoring for now. */
7362 if (IEM_IS_MODRM_REG_MODE(bRm))
7363 {
7364 /* register target */
7365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7366 IEM_MC_BEGIN(0, 0);
7367 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7368 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7369 } IEM_MC_ELSE() {
7370 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7371 } IEM_MC_ENDIF();
7372 IEM_MC_ADVANCE_RIP();
7373 IEM_MC_END();
7374 }
7375 else
7376 {
7377 /* memory target */
7378 IEM_MC_BEGIN(0, 1);
7379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7382 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7383 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7384 } IEM_MC_ELSE() {
7385 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7386 } IEM_MC_ENDIF();
7387 IEM_MC_ADVANCE_RIP();
7388 IEM_MC_END();
7389 }
7390 return VINF_SUCCESS;
7391}
7392
7393
7394/** Opcode 0x0f 0x9f. */
7395FNIEMOP_DEF(iemOp_setnle_Eb)
7396{
7397 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
7398 IEMOP_HLP_MIN_386();
7399 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7400
7401 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7402 * any way. AMD says it's "unused", whatever that means. We're
7403 * ignoring for now. */
7404 if (IEM_IS_MODRM_REG_MODE(bRm))
7405 {
7406 /* register target */
7407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7408 IEM_MC_BEGIN(0, 0);
7409 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7410 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7411 } IEM_MC_ELSE() {
7412 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7413 } IEM_MC_ENDIF();
7414 IEM_MC_ADVANCE_RIP();
7415 IEM_MC_END();
7416 }
7417 else
7418 {
7419 /* memory target */
7420 IEM_MC_BEGIN(0, 1);
7421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7422 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7424 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7425 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7426 } IEM_MC_ELSE() {
7427 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7428 } IEM_MC_ENDIF();
7429 IEM_MC_ADVANCE_RIP();
7430 IEM_MC_END();
7431 }
7432 return VINF_SUCCESS;
7433}
7434
7435
7436/**
7437 * Common 'push segment-register' helper.
7438 */
7439FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
7440{
7441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7442 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
7443 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7444
7445 switch (pVCpu->iem.s.enmEffOpSize)
7446 {
7447 case IEMMODE_16BIT:
7448 IEM_MC_BEGIN(0, 1);
7449 IEM_MC_LOCAL(uint16_t, u16Value);
7450 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
7451 IEM_MC_PUSH_U16(u16Value);
7452 IEM_MC_ADVANCE_RIP();
7453 IEM_MC_END();
7454 break;
7455
7456 case IEMMODE_32BIT:
7457 IEM_MC_BEGIN(0, 1);
7458 IEM_MC_LOCAL(uint32_t, u32Value);
7459 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
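            /* Note: IEM_MC_PUSH_U32_SREG rather than a plain 32-bit push,
               presumably to mimic CPUs that write only the low word of the
               stack slot when pushing a segment register with a 32-bit
               operand size. */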
7460 IEM_MC_PUSH_U32_SREG(u32Value);
7461 IEM_MC_ADVANCE_RIP();
7462 IEM_MC_END();
7463 break;
7464
7465 case IEMMODE_64BIT:
7466 IEM_MC_BEGIN(0, 1);
7467 IEM_MC_LOCAL(uint64_t, u64Value);
7468 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
7469 IEM_MC_PUSH_U64(u64Value);
7470 IEM_MC_ADVANCE_RIP();
7471 IEM_MC_END();
7472 break;
7473 }
7474
7475 return VINF_SUCCESS;
7476}
7477
7478
7479/** Opcode 0x0f 0xa0. */
7480FNIEMOP_DEF(iemOp_push_fs)
7481{
7482 IEMOP_MNEMONIC(push_fs, "push fs");
7483 IEMOP_HLP_MIN_386();
7484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7485 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
7486}
7487
7488
7489/** Opcode 0x0f 0xa1. */
7490FNIEMOP_DEF(iemOp_pop_fs)
7491{
7492 IEMOP_MNEMONIC(pop_fs, "pop fs");
7493 IEMOP_HLP_MIN_386();
7494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7495 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
7496}
7497
7498
7499/** Opcode 0x0f 0xa2. */
7500FNIEMOP_DEF(iemOp_cpuid)
7501{
7502 IEMOP_MNEMONIC(cpuid, "cpuid");
7503 IEMOP_HLP_MIN_486(); /* not all 486es. */
7504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7505 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
7506}
7507
7508
7509/**
7510 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
7511 * iemOp_bts_Ev_Gv.
7512 */
7513FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
7514{
7515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7516 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7517
7518 if (IEM_IS_MODRM_REG_MODE(bRm))
7519 {
7520 /* register destination. */
7521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7522 switch (pVCpu->iem.s.enmEffOpSize)
7523 {
7524 case IEMMODE_16BIT:
7525 IEM_MC_BEGIN(3, 0);
7526 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7527 IEM_MC_ARG(uint16_t, u16Src, 1);
7528 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7529
7530 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7531 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
7532 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7533 IEM_MC_REF_EFLAGS(pEFlags);
7534 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7535
7536 IEM_MC_ADVANCE_RIP();
7537 IEM_MC_END();
7538 return VINF_SUCCESS;
7539
7540 case IEMMODE_32BIT:
7541 IEM_MC_BEGIN(3, 0);
7542 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7543 IEM_MC_ARG(uint32_t, u32Src, 1);
7544 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7545
7546 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7547 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
7548 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7549 IEM_MC_REF_EFLAGS(pEFlags);
7550 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7551
7552 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7553 IEM_MC_ADVANCE_RIP();
7554 IEM_MC_END();
7555 return VINF_SUCCESS;
7556
7557 case IEMMODE_64BIT:
7558 IEM_MC_BEGIN(3, 0);
7559 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7560 IEM_MC_ARG(uint64_t, u64Src, 1);
7561 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7562
7563 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7564 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
7565 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7566 IEM_MC_REF_EFLAGS(pEFlags);
7567 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7568
7569 IEM_MC_ADVANCE_RIP();
7570 IEM_MC_END();
7571 return VINF_SUCCESS;
7572
7573 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7574 }
7575 }
7576 else
7577 {
7578 /* memory destination. */
7579
7580 uint32_t fAccess;
7581 if (pImpl->pfnLockedU16)
7582 fAccess = IEM_ACCESS_DATA_RW;
7583 else /* BT */
7584 fAccess = IEM_ACCESS_DATA_R;
7585
7586 /** @todo test negative bit offsets! */
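 /* The register operand holds a signed bit offset: the low 4/5/6 bits
    select the bit within a 16/32/64-bit unit and the remaining high bits
    select which unit relative to the effective address.  The SAR+SHL pair
    below turns the unit index into a signed byte displacement, roughly:
        GCPtrEffDst += ((intN_t)uSrc >> cShiftUnit) << cShiftScale;
    e.g. a 16-bit offset of -1 (0xffff) adjusts the address by -2 and
    tests bit 15 of the word just below the original effective address. */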
7587 switch (pVCpu->iem.s.enmEffOpSize)
7588 {
7589 case IEMMODE_16BIT:
7590 IEM_MC_BEGIN(3, 2);
7591 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7592 IEM_MC_ARG(uint16_t, u16Src, 1);
7593 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7595 IEM_MC_LOCAL(int16_t, i16AddrAdj);
7596
7597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7598 if (pImpl->pfnLockedU16)
7599 IEMOP_HLP_DONE_DECODING();
7600 else
7601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7602 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7603 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
7604 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
7605 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
7606 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
7607 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
7608 IEM_MC_FETCH_EFLAGS(EFlags);
7609
7610 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7611 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7612 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7613 else
7614 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7615 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7616
7617 IEM_MC_COMMIT_EFLAGS(EFlags);
7618 IEM_MC_ADVANCE_RIP();
7619 IEM_MC_END();
7620 return VINF_SUCCESS;
7621
7622 case IEMMODE_32BIT:
7623 IEM_MC_BEGIN(3, 2);
7624 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7625 IEM_MC_ARG(uint32_t, u32Src, 1);
7626 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7628 IEM_MC_LOCAL(int32_t, i32AddrAdj);
7629
7630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7631 if (pImpl->pfnLockedU16)
7632 IEMOP_HLP_DONE_DECODING();
7633 else
7634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7635 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7636 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
7637 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
7638 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
7639 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
7640 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
7641 IEM_MC_FETCH_EFLAGS(EFlags);
7642
7643 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7644 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7645 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7646 else
7647 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7648 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7649
7650 IEM_MC_COMMIT_EFLAGS(EFlags);
7651 IEM_MC_ADVANCE_RIP();
7652 IEM_MC_END();
7653 return VINF_SUCCESS;
7654
7655 case IEMMODE_64BIT:
7656 IEM_MC_BEGIN(3, 2);
7657 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7658 IEM_MC_ARG(uint64_t, u64Src, 1);
7659 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7661 IEM_MC_LOCAL(int64_t, i64AddrAdj);
7662
7663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7664 if (pImpl->pfnLockedU16)
7665 IEMOP_HLP_DONE_DECODING();
7666 else
7667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7668 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7669 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
7670 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
7671 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
7672 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
7673 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
7674 IEM_MC_FETCH_EFLAGS(EFlags);
7675
7676 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7677 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7678 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7679 else
7680 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7681 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7682
7683 IEM_MC_COMMIT_EFLAGS(EFlags);
7684 IEM_MC_ADVANCE_RIP();
7685 IEM_MC_END();
7686 return VINF_SUCCESS;
7687
7688 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7689 }
7690 }
7691}
7692
7693
7694/** Opcode 0x0f 0xa3. */
7695FNIEMOP_DEF(iemOp_bt_Ev_Gv)
7696{
7697 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
7698 IEMOP_HLP_MIN_386();
7699 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
7700}
7701
7702
7703/**
7704 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
7705 */
7706FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
7707{
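 /* Semantics recap: for a count c with 0 < c < width, SHLD computes
    dst = (dst << c) | (src >> (width - c)) and SHRD computes
    dst = (dst >> c) | (src << (width - c)); the count is presumably
    masked modulo the operand width by the AIMPL worker, and AF/OF are
    declared undefined below. */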
7708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7709 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7710
7711 if (IEM_IS_MODRM_REG_MODE(bRm))
7712 {
7713 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7715
7716 switch (pVCpu->iem.s.enmEffOpSize)
7717 {
7718 case IEMMODE_16BIT:
7719 IEM_MC_BEGIN(4, 0);
7720 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7721 IEM_MC_ARG(uint16_t, u16Src, 1);
7722 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7723 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7724
7725 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7726 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7727 IEM_MC_REF_EFLAGS(pEFlags);
7728 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7729
7730 IEM_MC_ADVANCE_RIP();
7731 IEM_MC_END();
7732 return VINF_SUCCESS;
7733
7734 case IEMMODE_32BIT:
7735 IEM_MC_BEGIN(4, 0);
7736 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7737 IEM_MC_ARG(uint32_t, u32Src, 1);
7738 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7739 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7740
7741 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7742 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7743 IEM_MC_REF_EFLAGS(pEFlags);
7744 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7745
7746 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7747 IEM_MC_ADVANCE_RIP();
7748 IEM_MC_END();
7749 return VINF_SUCCESS;
7750
7751 case IEMMODE_64BIT:
7752 IEM_MC_BEGIN(4, 0);
7753 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7754 IEM_MC_ARG(uint64_t, u64Src, 1);
7755 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7756 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7757
7758 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7759 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7760 IEM_MC_REF_EFLAGS(pEFlags);
7761 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7762
7763 IEM_MC_ADVANCE_RIP();
7764 IEM_MC_END();
7765 return VINF_SUCCESS;
7766
7767 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7768 }
7769 }
7770 else
7771 {
7772 switch (pVCpu->iem.s.enmEffOpSize)
7773 {
7774 case IEMMODE_16BIT:
7775 IEM_MC_BEGIN(4, 2);
7776 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7777 IEM_MC_ARG(uint16_t, u16Src, 1);
7778 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7779 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7781
7782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7783 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7784 IEM_MC_ASSIGN(cShiftArg, cShift);
7785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7786 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7787 IEM_MC_FETCH_EFLAGS(EFlags);
7788 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7789 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7790
7791 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7792 IEM_MC_COMMIT_EFLAGS(EFlags);
7793 IEM_MC_ADVANCE_RIP();
7794 IEM_MC_END();
7795 return VINF_SUCCESS;
7796
7797 case IEMMODE_32BIT:
7798 IEM_MC_BEGIN(4, 2);
7799 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7800 IEM_MC_ARG(uint32_t, u32Src, 1);
7801 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7802 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7803 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7804
7805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7806 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7807 IEM_MC_ASSIGN(cShiftArg, cShift);
7808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7809 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7810 IEM_MC_FETCH_EFLAGS(EFlags);
7811 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7812 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7813
7814 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7815 IEM_MC_COMMIT_EFLAGS(EFlags);
7816 IEM_MC_ADVANCE_RIP();
7817 IEM_MC_END();
7818 return VINF_SUCCESS;
7819
7820 case IEMMODE_64BIT:
7821 IEM_MC_BEGIN(4, 2);
7822 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7823 IEM_MC_ARG(uint64_t, u64Src, 1);
7824 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7825 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7827
7828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7829 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7830 IEM_MC_ASSIGN(cShiftArg, cShift);
7831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7832 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7833 IEM_MC_FETCH_EFLAGS(EFlags);
7834 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7835 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7836
7837 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7838 IEM_MC_COMMIT_EFLAGS(EFlags);
7839 IEM_MC_ADVANCE_RIP();
7840 IEM_MC_END();
7841 return VINF_SUCCESS;
7842
7843 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7844 }
7845 }
7846}
7847
7848
7849/**
7850 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
7851 */
7852FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
7853{
7854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7855 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7856
7857 if (IEM_IS_MODRM_REG_MODE(bRm))
7858 {
7859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7860
7861 switch (pVCpu->iem.s.enmEffOpSize)
7862 {
7863 case IEMMODE_16BIT:
7864 IEM_MC_BEGIN(4, 0);
7865 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7866 IEM_MC_ARG(uint16_t, u16Src, 1);
7867 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7868 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7869
7870 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7871 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7872 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7873 IEM_MC_REF_EFLAGS(pEFlags);
7874 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7875
7876 IEM_MC_ADVANCE_RIP();
7877 IEM_MC_END();
7878 return VINF_SUCCESS;
7879
7880 case IEMMODE_32BIT:
7881 IEM_MC_BEGIN(4, 0);
7882 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7883 IEM_MC_ARG(uint32_t, u32Src, 1);
7884 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7885 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7886
7887 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7888 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7889 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7890 IEM_MC_REF_EFLAGS(pEFlags);
7891 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7892
7893 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7894 IEM_MC_ADVANCE_RIP();
7895 IEM_MC_END();
7896 return VINF_SUCCESS;
7897
7898 case IEMMODE_64BIT:
7899 IEM_MC_BEGIN(4, 0);
7900 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7901 IEM_MC_ARG(uint64_t, u64Src, 1);
7902 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7903 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7904
7905 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7906 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7907 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7908 IEM_MC_REF_EFLAGS(pEFlags);
7909 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7910
7911 IEM_MC_ADVANCE_RIP();
7912 IEM_MC_END();
7913 return VINF_SUCCESS;
7914
7915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7916 }
7917 }
7918 else
7919 {
7920 switch (pVCpu->iem.s.enmEffOpSize)
7921 {
7922 case IEMMODE_16BIT:
7923 IEM_MC_BEGIN(4, 2);
7924 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7925 IEM_MC_ARG(uint16_t, u16Src, 1);
7926 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7927 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7928 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7929
7930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7932 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7933 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7934 IEM_MC_FETCH_EFLAGS(EFlags);
7935 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7936 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7937
7938 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7939 IEM_MC_COMMIT_EFLAGS(EFlags);
7940 IEM_MC_ADVANCE_RIP();
7941 IEM_MC_END();
7942 return VINF_SUCCESS;
7943
7944 case IEMMODE_32BIT:
7945 IEM_MC_BEGIN(4, 2);
7946 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7947 IEM_MC_ARG(uint32_t, u32Src, 1);
7948 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7949 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7951
7952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7954 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7955 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7956 IEM_MC_FETCH_EFLAGS(EFlags);
7957 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7958 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7959
7960 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7961 IEM_MC_COMMIT_EFLAGS(EFlags);
7962 IEM_MC_ADVANCE_RIP();
7963 IEM_MC_END();
7964 return VINF_SUCCESS;
7965
7966 case IEMMODE_64BIT:
7967 IEM_MC_BEGIN(4, 2);
7968 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7969 IEM_MC_ARG(uint64_t, u64Src, 1);
7970 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7971 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7973
7974 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7976 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7977 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7978 IEM_MC_FETCH_EFLAGS(EFlags);
7979 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7980 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7981
7982 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7983 IEM_MC_COMMIT_EFLAGS(EFlags);
7984 IEM_MC_ADVANCE_RIP();
7985 IEM_MC_END();
7986 return VINF_SUCCESS;
7987
7988 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7989 }
7990 }
7991}
7992
7993
7994
7995/** Opcode 0x0f 0xa4. */
7996FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
7997{
7998 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
7999 IEMOP_HLP_MIN_386();
8000 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
8001}
8002
8003
8004/** Opcode 0x0f 0xa5. */
8005FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
8006{
8007 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
8008 IEMOP_HLP_MIN_386();
8009 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
8010}
8011
8012
8013/** Opcode 0x0f 0xa8. */
8014FNIEMOP_DEF(iemOp_push_gs)
8015{
8016 IEMOP_MNEMONIC(push_gs, "push gs");
8017 IEMOP_HLP_MIN_386();
8018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8019 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
8020}
8021
8022
8023/** Opcode 0x0f 0xa9. */
8024FNIEMOP_DEF(iemOp_pop_gs)
8025{
8026 IEMOP_MNEMONIC(pop_gs, "pop gs");
8027 IEMOP_HLP_MIN_386();
8028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8029 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
8030}
8031
8032
8033/** Opcode 0x0f 0xaa. */
8034FNIEMOP_DEF(iemOp_rsm)
8035{
8036 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
8037 IEMOP_HLP_MIN_386(); /* 386SL and later. */
8038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8039 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
8040}
8041
8042
8043
8044/** Opcode 0x0f 0xab. */
8045FNIEMOP_DEF(iemOp_bts_Ev_Gv)
8046{
8047 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
8048 IEMOP_HLP_MIN_386();
8049 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
8050}
8051
8052
8053/** Opcode 0x0f 0xac. */
8054FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
8055{
8056 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
8057 IEMOP_HLP_MIN_386();
8058 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
8059}
8060
8061
8062/** Opcode 0x0f 0xad. */
8063FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
8064{
8065 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
8066 IEMOP_HLP_MIN_386();
8067 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
8068}
8069
8070
8071/** Opcode 0x0f 0xae mem/0. */
8072FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
8073{
8074 IEMOP_MNEMONIC(fxsave, "fxsave m512");
8075 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
8076 return IEMOP_RAISE_INVALID_OPCODE();
8077
8078 IEM_MC_BEGIN(3, 1);
8079 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8080 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8081 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
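 /* Passing the effective operand size along presumably lets the C
    implementation distinguish the FXSAVE and REX.W FXSAVE64 image
    layouts. */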
8082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8084 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8085 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8086 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
8087 IEM_MC_END();
8088 return VINF_SUCCESS;
8089}
8090
8091
8092/** Opcode 0x0f 0xae mem/1. */
8093FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
8094{
8095 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
8096 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
8097 return IEMOP_RAISE_INVALID_OPCODE();
8098
8099 IEM_MC_BEGIN(3, 1);
8100 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8101 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8102 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
8103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8105 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8106 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8107 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
8108 IEM_MC_END();
8109 return VINF_SUCCESS;
8110}
8111
8112
8113/**
8114 * @opmaps grp15
8115 * @opcode !11/2
8116 * @oppfx none
8117 * @opcpuid sse
8118 * @opgroup og_sse_mxcsrsm
8119 * @opxcpttype 5
8120 * @optest op1=0 -> mxcsr=0
8121 * @optest op1=0x2083 -> mxcsr=0x2083
8122 * @optest op1=0xfffffffe -> value.xcpt=0xd
8123 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
8124 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
8125 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
8126 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
8127 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
8128 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
8129 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
8130 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
8131 */
8132FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
8133{
8134 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8135 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
8136 return IEMOP_RAISE_INVALID_OPCODE();
8137
8138 IEM_MC_BEGIN(2, 0);
8139 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8140 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8143 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8144 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8145 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
8146 IEM_MC_END();
8147 return VINF_SUCCESS;
8148}
8149
8150
8151/**
8152 * @opmaps grp15
8153 * @opcode !11/3
8154 * @oppfx none
8155 * @opcpuid sse
8156 * @opgroup og_sse_mxcsrsm
8157 * @opxcpttype 5
8158 * @optest mxcsr=0 -> op1=0
8159 * @optest mxcsr=0x2083 -> op1=0x2083
8160 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
8161 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
8162 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
8163 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
8164 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
8165 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
8166 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
8167 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
8168 */
8169FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
8170{
8171 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8172 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
8173 return IEMOP_RAISE_INVALID_OPCODE();
8174
8175 IEM_MC_BEGIN(2, 0);
8176 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8177 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8180 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8181 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8182 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
8183 IEM_MC_END();
8184 return VINF_SUCCESS;
8185}
8186
8187
8188/**
8189 * @opmaps grp15
8190 * @opcode !11/4
8191 * @oppfx none
8192 * @opcpuid xsave
8193 * @opgroup og_system
8194 * @opxcpttype none
8195 */
8196FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
8197{
8198 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
8199 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
8200 return IEMOP_RAISE_INVALID_OPCODE();
8201
8202 IEM_MC_BEGIN(3, 0);
8203 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8204 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8205 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
8206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8208 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8209 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8210 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
8211 IEM_MC_END();
8212 return VINF_SUCCESS;
8213}
8214
8215
8216/**
8217 * @opmaps grp15
8218 * @opcode !11/5
8219 * @oppfx none
8220 * @opcpuid xsave
8221 * @opgroup og_system
8222 * @opxcpttype none
8223 */
8224FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
8225{
8226 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
8227 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
8228 return IEMOP_RAISE_INVALID_OPCODE();
8229
8230 IEM_MC_BEGIN(3, 0);
8231 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8232 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8233 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
8234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8236 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8237 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8238 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
8239 IEM_MC_END();
8240 return VINF_SUCCESS;
8241}
8242
8243/** Opcode 0x0f 0xae mem/6. */
8244FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
8245
8246/**
8247 * @opmaps grp15
8248 * @opcode !11/7
8249 * @oppfx none
8250 * @opcpuid clfsh
8251 * @opgroup og_cachectl
8252 * @optest op1=1 ->
8253 */
8254FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
8255{
8256 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8257 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
8258 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
8259
8260 IEM_MC_BEGIN(2, 0);
8261 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8262 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8265 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8266 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
8267 IEM_MC_END();
8268 return VINF_SUCCESS;
8269}
8270
8271/**
8272 * @opmaps grp15
8273 * @opcode !11/7
8274 * @oppfx 0x66
8275 * @opcpuid clflushopt
8276 * @opgroup og_cachectl
8277 * @optest op1=1 ->
8278 */
8279FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
8280{
8281 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8282 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
8283 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
8284
8285 IEM_MC_BEGIN(2, 0);
8286 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8287 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8290 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8291 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
8292 IEM_MC_END();
8293 return VINF_SUCCESS;
8294}
8295
8296
8297/** Opcode 0x0f 0xae 11b/5. */
8298FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
8299{
8300 RT_NOREF_PV(bRm);
8301 IEMOP_MNEMONIC(lfence, "lfence");
8302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8303 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8304 return IEMOP_RAISE_INVALID_OPCODE();
8305
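 /* The native lfence worker is used whenever possible; only x86 hosts
    lacking SSE2 fall back to iemAImpl_alt_mem_fence, while on ARM64 hosts
    no fallback is compiled in and the worker is presumably implemented
    with the corresponding barrier instruction. */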
8306 IEM_MC_BEGIN(0, 0);
8307#ifndef RT_ARCH_ARM64
8308 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
8309#endif
8310 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
8311#ifndef RT_ARCH_ARM64
8312 else
8313 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
8314#endif
8315 IEM_MC_ADVANCE_RIP();
8316 IEM_MC_END();
8317 return VINF_SUCCESS;
8318}
8319
8320
8321/** Opcode 0x0f 0xae 11b/6. */
8322FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
8323{
8324 RT_NOREF_PV(bRm);
8325 IEMOP_MNEMONIC(mfence, "mfence");
8326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8327 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8328 return IEMOP_RAISE_INVALID_OPCODE();
8329
8330 IEM_MC_BEGIN(0, 0);
8331#ifndef RT_ARCH_ARM64
8332 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
8333#endif
8334 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
8335#ifndef RT_ARCH_ARM64
8336 else
8337 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
8338#endif
8339 IEM_MC_ADVANCE_RIP();
8340 IEM_MC_END();
8341 return VINF_SUCCESS;
8342}
8343
8344
8345/** Opcode 0x0f 0xae 11b/7. */
8346FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
8347{
8348 RT_NOREF_PV(bRm);
8349 IEMOP_MNEMONIC(sfence, "sfence");
8350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8351 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8352 return IEMOP_RAISE_INVALID_OPCODE();
8353
8354 IEM_MC_BEGIN(0, 0);
8355#ifndef RT_ARCH_ARM64
8356 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
8357#endif
8358 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
8359#ifndef RT_ARCH_ARM64
8360 else
8361 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
8362#endif
8363 IEM_MC_ADVANCE_RIP();
8364 IEM_MC_END();
8365 return VINF_SUCCESS;
8366}
8367
8368
8369/** Opcode 0xf3 0x0f 0xae 11b/0. */
8370FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
8371{
8372 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
8373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
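 /* IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT presumably raises #UD unless we are
    in 64-bit mode with CR4.FSGSBASE set; beyond that, only the REX.W
    driven operand size below matters. */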
8374 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
8375 {
8376 IEM_MC_BEGIN(1, 0);
8377 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8378 IEM_MC_ARG(uint64_t, u64Dst, 0);
8379 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
8380 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
8381 IEM_MC_ADVANCE_RIP();
8382 IEM_MC_END();
8383 }
8384 else
8385 {
8386 IEM_MC_BEGIN(1, 0);
8387 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8388 IEM_MC_ARG(uint32_t, u32Dst, 0);
8389 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
8390 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
8391 IEM_MC_ADVANCE_RIP();
8392 IEM_MC_END();
8393 }
8394 return VINF_SUCCESS;
8395}
8396
8397
8398/** Opcode 0xf3 0x0f 0xae 11b/1. */
8399FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
8400{
8401 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
8402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8403 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
8404 {
8405 IEM_MC_BEGIN(1, 0);
8406 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8407 IEM_MC_ARG(uint64_t, u64Dst, 0);
8408 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
8409 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
8410 IEM_MC_ADVANCE_RIP();
8411 IEM_MC_END();
8412 }
8413 else
8414 {
8415 IEM_MC_BEGIN(1, 0);
8416 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8417 IEM_MC_ARG(uint32_t, u32Dst, 0);
8418 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
8419 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
8420 IEM_MC_ADVANCE_RIP();
8421 IEM_MC_END();
8422 }
8423 return VINF_SUCCESS;
8424}
8425
8426
8427/** Opcode 0xf3 0x0f 0xae 11b/2. */
8428FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
8429{
8430 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
8431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8432 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
8433 {
8434 IEM_MC_BEGIN(1, 0);
8435 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8436 IEM_MC_ARG(uint64_t, u64Dst, 0);
8437 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8438 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
8439 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
8440 IEM_MC_ADVANCE_RIP();
8441 IEM_MC_END();
8442 }
8443 else
8444 {
8445 IEM_MC_BEGIN(1, 0);
8446 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8447 IEM_MC_ARG(uint32_t, u32Dst, 0);
8448 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8449 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
8450 IEM_MC_ADVANCE_RIP();
8451 IEM_MC_END();
8452 }
8453 return VINF_SUCCESS;
8454}
8455
8456
8457/** Opcode 0xf3 0x0f 0xae 11b/3. */
8458FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
8459{
8460 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
8461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8462 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
8463 {
8464 IEM_MC_BEGIN(1, 0);
8465 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8466 IEM_MC_ARG(uint64_t, u64Dst, 0);
8467 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8468 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
8469 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
8470 IEM_MC_ADVANCE_RIP();
8471 IEM_MC_END();
8472 }
8473 else
8474 {
8475 IEM_MC_BEGIN(1, 0);
8476 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8477 IEM_MC_ARG(uint32_t, u32Dst, 0);
8478 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8479 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
8480 IEM_MC_ADVANCE_RIP();
8481 IEM_MC_END();
8482 }
8483 return VINF_SUCCESS;
8484}
8485
8486
8487/**
8488 * Group 15 jump table for register variant.
8489 */
8490IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
8491{ /* pfx: none, 066h, 0f3h, 0f2h */
8492 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
8493 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
8494 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
8495 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
8496 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8497 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8498 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8499 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8500};
8501AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
8502
8503
8504/**
8505 * Group 15 jump table for memory variant.
8506 */
8507IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
8508{ /* pfx: none, 066h, 0f3h, 0f2h */
8509 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8510 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8511 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8512 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8513 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8514 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8515 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8516 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8517};
8518AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
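/*
 * Both tables are indexed as modrm.reg * 4 + idxPrefix, with idxPrefix
 * following the column order above (none, 066h, 0f3h, 0f2h); e.g. an
 * 0f3h-prefixed /2 in register mode dispatches to iemOp_Grp15_wrfsbase.
 */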
8519
8520
8521/** Opcode 0x0f 0xae. */
8522FNIEMOP_DEF(iemOp_Grp15)
8523{
8524 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
8525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8526 if (IEM_IS_MODRM_REG_MODE(bRm))
8527 /* register, register */
8528 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
8529 + pVCpu->iem.s.idxPrefix], bRm);
8530 /* memory, register */
8531 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
8532 + pVCpu->iem.s.idxPrefix], bRm);
8533}
8534
8535
8536/** Opcode 0x0f 0xaf. */
8537FNIEMOP_DEF(iemOp_imul_Gv_Ev)
8538{
8539 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
8540 IEMOP_HLP_MIN_386();
8541 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8542 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
8543}
8544
8545
8546/** Opcode 0x0f 0xb0. */
8547FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
8548{
8549 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
8550 IEMOP_HLP_MIN_486();
8551 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
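 /* CMPXCHG recap: AL is compared with the destination; on a match ZF is
    set and the source is stored into the destination, otherwise ZF is
    cleared and the destination value is loaded into AL - hence the AIMPL
    worker takes the accumulator by reference. */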
8552
8553 if (IEM_IS_MODRM_REG_MODE(bRm))
8554 {
8555 IEMOP_HLP_DONE_DECODING();
8556 IEM_MC_BEGIN(4, 0);
8557 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8558 IEM_MC_ARG(uint8_t *, pu8Al, 1);
8559 IEM_MC_ARG(uint8_t, u8Src, 2);
8560 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8561
8562 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8563 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8564 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
8565 IEM_MC_REF_EFLAGS(pEFlags);
8566 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8567 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
8568 else
8569 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
8570
8571 IEM_MC_ADVANCE_RIP();
8572 IEM_MC_END();
8573 }
8574 else
8575 {
8576 IEM_MC_BEGIN(4, 3);
8577 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8578 IEM_MC_ARG(uint8_t *, pu8Al, 1);
8579 IEM_MC_ARG(uint8_t, u8Src, 2);
8580 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8582 IEM_MC_LOCAL(uint8_t, u8Al);
8583
8584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8585 IEMOP_HLP_DONE_DECODING();
8586 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8587 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8588 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
8589 IEM_MC_FETCH_EFLAGS(EFlags);
8590 IEM_MC_REF_LOCAL(pu8Al, u8Al);
8591 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8592 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
8593 else
8594 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
8595
8596 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8597 IEM_MC_COMMIT_EFLAGS(EFlags);
8598 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
8599 IEM_MC_ADVANCE_RIP();
8600 IEM_MC_END();
8601 }
8602 return VINF_SUCCESS;
8603}
8604
8605/** Opcode 0x0f 0xb1. */
8606FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
8607{
8608 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
8609 IEMOP_HLP_MIN_486();
8610 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8611
8612 if (IEM_IS_MODRM_REG_MODE(bRm))
8613 {
8614 IEMOP_HLP_DONE_DECODING();
8615 switch (pVCpu->iem.s.enmEffOpSize)
8616 {
8617 case IEMMODE_16BIT:
8618 IEM_MC_BEGIN(4, 0);
8619 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8620 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
8621 IEM_MC_ARG(uint16_t, u16Src, 2);
8622 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8623
8624 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8625 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8626 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
8627 IEM_MC_REF_EFLAGS(pEFlags);
8628 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8629 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
8630 else
8631 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
8632
8633 IEM_MC_ADVANCE_RIP();
8634 IEM_MC_END();
8635 return VINF_SUCCESS;
8636
8637 case IEMMODE_32BIT:
8638 IEM_MC_BEGIN(4, 0);
8639 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8640 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
8641 IEM_MC_ARG(uint32_t, u32Src, 2);
8642 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8643
8644 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8645 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8646 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
8647 IEM_MC_REF_EFLAGS(pEFlags);
8648 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8649 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
8650 else
8651 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
8652
8653 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
8654 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8655 IEM_MC_ADVANCE_RIP();
8656 IEM_MC_END();
8657 return VINF_SUCCESS;
8658
8659 case IEMMODE_64BIT:
8660 IEM_MC_BEGIN(4, 0);
8661 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8662 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
8663#ifdef RT_ARCH_X86
8664 IEM_MC_ARG(uint64_t *, pu64Src, 2);
8665#else
8666 IEM_MC_ARG(uint64_t, u64Src, 2);
8667#endif
8668 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8669
8670 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8671 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
8672 IEM_MC_REF_EFLAGS(pEFlags);
8673#ifdef RT_ARCH_X86
8674 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8675 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8676 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
8677 else
8678 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
8679#else
8680 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8681 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8682 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
8683 else
8684 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
8685#endif
8686
8687 IEM_MC_ADVANCE_RIP();
8688 IEM_MC_END();
8689 return VINF_SUCCESS;
8690
8691 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8692 }
8693 }
8694 else
8695 {
8696 switch (pVCpu->iem.s.enmEffOpSize)
8697 {
8698 case IEMMODE_16BIT:
8699 IEM_MC_BEGIN(4, 3);
8700 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8701 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
8702 IEM_MC_ARG(uint16_t, u16Src, 2);
8703 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8705 IEM_MC_LOCAL(uint16_t, u16Ax);
8706
8707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8708 IEMOP_HLP_DONE_DECODING();
8709 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8710 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8711 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
8712 IEM_MC_FETCH_EFLAGS(EFlags);
8713 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
8714 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8715 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
8716 else
8717 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
8718
8719 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8720 IEM_MC_COMMIT_EFLAGS(EFlags);
8721 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
8722 IEM_MC_ADVANCE_RIP();
8723 IEM_MC_END();
8724 return VINF_SUCCESS;
8725
8726 case IEMMODE_32BIT:
8727 IEM_MC_BEGIN(4, 3);
8728 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8729 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
8730 IEM_MC_ARG(uint32_t, u32Src, 2);
8731 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8733 IEM_MC_LOCAL(uint32_t, u32Eax);
8734
8735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8736 IEMOP_HLP_DONE_DECODING();
8737 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8738 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8739 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
8740 IEM_MC_FETCH_EFLAGS(EFlags);
8741 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
8742 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8743 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
8744 else
8745 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
8746
8747 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8748 IEM_MC_COMMIT_EFLAGS(EFlags);
8749 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
8750 IEM_MC_ADVANCE_RIP();
8751 IEM_MC_END();
8752 return VINF_SUCCESS;
8753
8754 case IEMMODE_64BIT:
8755 IEM_MC_BEGIN(4, 3);
8756 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8757 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
8758#ifdef RT_ARCH_X86
8759 IEM_MC_ARG(uint64_t *, pu64Src, 2);
8760#else
8761 IEM_MC_ARG(uint64_t, u64Src, 2);
8762#endif
8763 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8765 IEM_MC_LOCAL(uint64_t, u64Rax);
8766
8767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8768 IEMOP_HLP_DONE_DECODING();
8769 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8770 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
8771 IEM_MC_FETCH_EFLAGS(EFlags);
8772 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
8773#ifdef RT_ARCH_X86
8774 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8775 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8776 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
8777 else
8778 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
8779#else
8780 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8781 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8782 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
8783 else
8784 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
8785#endif
8786
8787 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8788 IEM_MC_COMMIT_EFLAGS(EFlags);
8789 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
8790 IEM_MC_ADVANCE_RIP();
8791 IEM_MC_END();
8792 return VINF_SUCCESS;
8793
8794 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8795 }
8796 }
8797}
8798
8799
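/**
 * Common worker for lss/lfs/lgs: loads a far pointer from memory (the
 * offset first, followed by the 16-bit selector) into a general register
 * and the given segment register via iemCImpl_load_SReg_Greg.
 */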
8800FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
8801{
8802 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
8803 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
8804
8805 switch (pVCpu->iem.s.enmEffOpSize)
8806 {
8807 case IEMMODE_16BIT:
8808 IEM_MC_BEGIN(5, 1);
8809 IEM_MC_ARG(uint16_t, uSel, 0);
8810 IEM_MC_ARG(uint16_t, offSeg, 1);
8811 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8812 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8813 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8814 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8817 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8818 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
8819 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8820 IEM_MC_END();
8821 return VINF_SUCCESS;
8822
8823 case IEMMODE_32BIT:
8824 IEM_MC_BEGIN(5, 1);
8825 IEM_MC_ARG(uint16_t, uSel, 0);
8826 IEM_MC_ARG(uint32_t, offSeg, 1);
8827 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8828 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8829 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8830 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8833 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8834 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
8835 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8836 IEM_MC_END();
8837 return VINF_SUCCESS;
8838
8839 case IEMMODE_64BIT:
8840 IEM_MC_BEGIN(5, 1);
8841 IEM_MC_ARG(uint16_t, uSel, 0);
8842 IEM_MC_ARG(uint64_t, offSeg, 1);
8843 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8844 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8845 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8846 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8849 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manuals claims it only loads a 32-bit greg. */
8850 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8851 else
8852 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8853 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
8854 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8855 IEM_MC_END();
8856 return VINF_SUCCESS;
8857
8858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8859 }
8860}
8861
8862
8863/** Opcode 0x0f 0xb2. */
8864FNIEMOP_DEF(iemOp_lss_Gv_Mp)
8865{
8866 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
8867 IEMOP_HLP_MIN_386();
8868 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8869 if (IEM_IS_MODRM_REG_MODE(bRm))
8870 return IEMOP_RAISE_INVALID_OPCODE();
8871 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
8872}
8873
8874
8875/** Opcode 0x0f 0xb3. */
8876FNIEMOP_DEF(iemOp_btr_Ev_Gv)
8877{
8878 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
8879 IEMOP_HLP_MIN_386();
8880 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
8881}
8882
8883
8884/** Opcode 0x0f 0xb4. */
8885FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
8886{
8887 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
8888 IEMOP_HLP_MIN_386();
8889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8890 if (IEM_IS_MODRM_REG_MODE(bRm))
8891 return IEMOP_RAISE_INVALID_OPCODE();
8892 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
8893}
8894
8895
8896/** Opcode 0x0f 0xb5. */
8897FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
8898{
8899 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
8900 IEMOP_HLP_MIN_386();
8901 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8902 if (IEM_IS_MODRM_REG_MODE(bRm))
8903 return IEMOP_RAISE_INVALID_OPCODE();
8904 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
8905}
8906
8907
8908/** Opcode 0x0f 0xb6. */
8909FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
8910{
8911 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
8912 IEMOP_HLP_MIN_386();
8913
8914 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8915
8916 /*
8917 * If rm is denoting a register, no more instruction bytes.
8918 */
8919 if (IEM_IS_MODRM_REG_MODE(bRm))
8920 {
8921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8922 switch (pVCpu->iem.s.enmEffOpSize)
8923 {
8924 case IEMMODE_16BIT:
8925 IEM_MC_BEGIN(0, 1);
8926 IEM_MC_LOCAL(uint16_t, u16Value);
8927 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8928 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8929 IEM_MC_ADVANCE_RIP();
8930 IEM_MC_END();
8931 return VINF_SUCCESS;
8932
8933 case IEMMODE_32BIT:
8934 IEM_MC_BEGIN(0, 1);
8935 IEM_MC_LOCAL(uint32_t, u32Value);
8936 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8937 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8938 IEM_MC_ADVANCE_RIP();
8939 IEM_MC_END();
8940 return VINF_SUCCESS;
8941
8942 case IEMMODE_64BIT:
8943 IEM_MC_BEGIN(0, 1);
8944 IEM_MC_LOCAL(uint64_t, u64Value);
8945 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8946 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8947 IEM_MC_ADVANCE_RIP();
8948 IEM_MC_END();
8949 return VINF_SUCCESS;
8950
8951 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8952 }
8953 }
8954 else
8955 {
8956 /*
8957 * We're loading a register from memory.
8958 */
8959 switch (pVCpu->iem.s.enmEffOpSize)
8960 {
8961 case IEMMODE_16BIT:
8962 IEM_MC_BEGIN(0, 2);
8963 IEM_MC_LOCAL(uint16_t, u16Value);
8964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8967 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8968 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8969 IEM_MC_ADVANCE_RIP();
8970 IEM_MC_END();
8971 return VINF_SUCCESS;
8972
8973 case IEMMODE_32BIT:
8974 IEM_MC_BEGIN(0, 2);
8975 IEM_MC_LOCAL(uint32_t, u32Value);
8976 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8977 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8979 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8980 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8981 IEM_MC_ADVANCE_RIP();
8982 IEM_MC_END();
8983 return VINF_SUCCESS;
8984
8985 case IEMMODE_64BIT:
8986 IEM_MC_BEGIN(0, 2);
8987 IEM_MC_LOCAL(uint64_t, u64Value);
8988 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8991 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8992 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8993 IEM_MC_ADVANCE_RIP();
8994 IEM_MC_END();
8995 return VINF_SUCCESS;
8996
8997 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8998 }
8999 }
9000}
9001
9002
9003/** Opcode 0x0f 0xb7. */
9004FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
9005{
9006 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
9007 IEMOP_HLP_MIN_386();
9008
9009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9010
9011 /** @todo Not entirely sure how the operand size prefix is handled here,
9012 * assuming that it will be ignored. Would be nice to have a few
9013 * tests for this. */
9014 /*
9015 * If rm is denoting a register, no more instruction bytes.
9016 */
9017 if (IEM_IS_MODRM_REG_MODE(bRm))
9018 {
9019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9020 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
9021 {
9022 IEM_MC_BEGIN(0, 1);
9023 IEM_MC_LOCAL(uint32_t, u32Value);
9024 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9025 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9026 IEM_MC_ADVANCE_RIP();
9027 IEM_MC_END();
9028 }
9029 else
9030 {
9031 IEM_MC_BEGIN(0, 1);
9032 IEM_MC_LOCAL(uint64_t, u64Value);
9033 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9034 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9035 IEM_MC_ADVANCE_RIP();
9036 IEM_MC_END();
9037 }
9038 }
9039 else
9040 {
9041 /*
9042 * We're loading a register from memory.
9043 */
9044 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
9045 {
9046 IEM_MC_BEGIN(0, 2);
9047 IEM_MC_LOCAL(uint32_t, u32Value);
9048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9051 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9052 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9053 IEM_MC_ADVANCE_RIP();
9054 IEM_MC_END();
9055 }
9056 else
9057 {
9058 IEM_MC_BEGIN(0, 2);
9059 IEM_MC_LOCAL(uint64_t, u64Value);
9060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9061 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9063 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9064 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9065 IEM_MC_ADVANCE_RIP();
9066 IEM_MC_END();
9067 }
9068 }
9069 return VINF_SUCCESS;
9070}
9071
9072
9073/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
9074FNIEMOP_UD_STUB(iemOp_jmpe);
9075
9076
9077/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
9078FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
9079{
9080 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
9081 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
9082 return iemOp_InvalidNeedRM(pVCpu);
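 /* A native assembly worker is preferred when the host itself has POPCNT;
    IEM_SELECT_HOST_OR_FALLBACK keys off the host fPopCnt feature flag and
    picks the C fallback table otherwise. */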
9083#ifndef TST_IEM_CHECK_MC
9084# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
9085 static const IEMOPBINSIZES s_Native =
9086 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
9087# endif
9088 static const IEMOPBINSIZES s_Fallback =
9089 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
9090#endif
9091 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
9092}
9093
9094
9095/**
9096 * @opcode 0xb9
9097 * @opinvalid intel-modrm
9098 * @optest ->
9099 */
9100FNIEMOP_DEF(iemOp_Grp10)
9101{
9102 /*
9103 * AMD does not decode beyond the 0xb9 whereas Intel does the modr/m byte
9104 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
9105 */
9106 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
9107 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
9108 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
9109}
9110
9111
9112/** Opcode 0x0f 0xba. */
9113FNIEMOP_DEF(iemOp_Grp8)
9114{
9115 IEMOP_HLP_MIN_386();
9116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9117 PCIEMOPBINSIZES pImpl;
9118 switch (IEM_GET_MODRM_REG_8(bRm))
9119 {
9120 case 0: case 1: case 2: case 3:
9121 /* Both AMD and Intel want full modr/m decoding and imm8. */
9122 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
9123 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
9124 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
9125 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
9126 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
9127 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9128 }
9129 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9130
9131 if (IEM_IS_MODRM_REG_MODE(bRm))
9132 {
9133 /* register destination. */
9134 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
9135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9136
9137 switch (pVCpu->iem.s.enmEffOpSize)
9138 {
9139 case IEMMODE_16BIT:
9140 IEM_MC_BEGIN(3, 0);
9141 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9142 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
9143 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9144
9145 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9146 IEM_MC_REF_EFLAGS(pEFlags);
9147 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9148
9149 IEM_MC_ADVANCE_RIP();
9150 IEM_MC_END();
9151 return VINF_SUCCESS;
9152
9153 case IEMMODE_32BIT:
9154 IEM_MC_BEGIN(3, 0);
9155 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9156 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
9157 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9158
9159 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9160 IEM_MC_REF_EFLAGS(pEFlags);
9161 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9162
9163 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9164 IEM_MC_ADVANCE_RIP();
9165 IEM_MC_END();
9166 return VINF_SUCCESS;
9167
9168 case IEMMODE_64BIT:
9169 IEM_MC_BEGIN(3, 0);
9170 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9171 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
9172 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9173
9174 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9175 IEM_MC_REF_EFLAGS(pEFlags);
9176 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9177
9178 IEM_MC_ADVANCE_RIP();
9179 IEM_MC_END();
9180 return VINF_SUCCESS;
9181
9182 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9183 }
9184 }
9185 else
9186 {
9187 /* memory destination. */
9188
9189 uint32_t fAccess;
9190 if (pImpl->pfnLockedU16)
9191 fAccess = IEM_ACCESS_DATA_RW;
9192 else /* BT */
9193 fAccess = IEM_ACCESS_DATA_R;
9194
9195 /** @todo test negative bit offsets! */
9196 switch (pVCpu->iem.s.enmEffOpSize)
9197 {
9198 case IEMMODE_16BIT:
9199 IEM_MC_BEGIN(3, 1);
9200 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9201 IEM_MC_ARG(uint16_t, u16Src, 1);
9202 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9204
9205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9206 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
9207 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
9208 if (pImpl->pfnLockedU16)
9209 IEMOP_HLP_DONE_DECODING();
9210 else
9211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9212 IEM_MC_FETCH_EFLAGS(EFlags);
9213 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9214 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9215 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9216 else
9217 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9218 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9219
9220 IEM_MC_COMMIT_EFLAGS(EFlags);
9221 IEM_MC_ADVANCE_RIP();
9222 IEM_MC_END();
9223 return VINF_SUCCESS;
9224
9225 case IEMMODE_32BIT:
9226 IEM_MC_BEGIN(3, 1);
9227 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9228 IEM_MC_ARG(uint32_t, u32Src, 1);
9229 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9230 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9231
9232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9233 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
9234 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
9235 if (pImpl->pfnLockedU16)
9236 IEMOP_HLP_DONE_DECODING();
9237 else
9238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9239 IEM_MC_FETCH_EFLAGS(EFlags);
9240 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9241 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9242 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9243 else
9244 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9245 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9246
9247 IEM_MC_COMMIT_EFLAGS(EFlags);
9248 IEM_MC_ADVANCE_RIP();
9249 IEM_MC_END();
9250 return VINF_SUCCESS;
9251
9252 case IEMMODE_64BIT:
9253 IEM_MC_BEGIN(3, 1);
9254 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9255 IEM_MC_ARG(uint64_t, u64Src, 1);
9256 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9258
9259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9260 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
9261 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
9262 if (pImpl->pfnLockedU16)
9263 IEMOP_HLP_DONE_DECODING();
9264 else
9265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9266 IEM_MC_FETCH_EFLAGS(EFlags);
9267 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9268 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9269 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9270 else
9271 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9272 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9273
9274 IEM_MC_COMMIT_EFLAGS(EFlags);
9275 IEM_MC_ADVANCE_RIP();
9276 IEM_MC_END();
9277 return VINF_SUCCESS;
9278
9279 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9280 }
9281 }
9282}
9283
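/* Informal example of the imm8 masking above: for both register and memory
   destinations the bit offset is reduced modulo the operand width, so
   'bt ax, 21' tests bit 21 & 0x0f = 5 of AX, and 'bts rax, 70' sets bit
   70 & 0x3f = 6 of RAX.  This differs from the BT Ev,Gv forms, where a
   register bit offset may address beyond the memory operand. */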
9284
9285/** Opcode 0x0f 0xbb. */
9286FNIEMOP_DEF(iemOp_btc_Ev_Gv)
9287{
9288 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
9289 IEMOP_HLP_MIN_386();
9290 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
9291}
9292
9293
9294/**
9295 * Common worker for BSF and BSR instructions.
9296 *
9297 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
9298 * the destination register, which means that for 32-bit operations the high
9299 * bits must be left alone.
9300 *
9301 * @param pImpl Pointer to the instruction implementation (assembly).
9302 */
9303FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
9304{
9305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9306
9307 /*
9308 * If rm is denoting a register, no more instruction bytes.
9309 */
9310 if (IEM_IS_MODRM_REG_MODE(bRm))
9311 {
9312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9313 switch (pVCpu->iem.s.enmEffOpSize)
9314 {
9315 case IEMMODE_16BIT:
9316 IEM_MC_BEGIN(3, 0);
9317 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9318 IEM_MC_ARG(uint16_t, u16Src, 1);
9319 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9320
9321 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
9322 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9323 IEM_MC_REF_EFLAGS(pEFlags);
9324 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9325
9326 IEM_MC_ADVANCE_RIP();
9327 IEM_MC_END();
9328 break;
9329
9330 case IEMMODE_32BIT:
9331 IEM_MC_BEGIN(3, 0);
9332 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9333 IEM_MC_ARG(uint32_t, u32Src, 1);
9334 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9335
9336 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
9337 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9338 IEM_MC_REF_EFLAGS(pEFlags);
9339 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9340 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9341 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9342 IEM_MC_ENDIF();
9343 IEM_MC_ADVANCE_RIP();
9344 IEM_MC_END();
9345 break;
9346
9347 case IEMMODE_64BIT:
9348 IEM_MC_BEGIN(3, 0);
9349 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9350 IEM_MC_ARG(uint64_t, u64Src, 1);
9351 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9352
9353 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
9354 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9355 IEM_MC_REF_EFLAGS(pEFlags);
9356 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9357
9358 IEM_MC_ADVANCE_RIP();
9359 IEM_MC_END();
9360 break;
9361 }
9362 }
9363 else
9364 {
9365 /*
9366 * We're accessing memory.
9367 */
9368 switch (pVCpu->iem.s.enmEffOpSize)
9369 {
9370 case IEMMODE_16BIT:
9371 IEM_MC_BEGIN(3, 1);
9372 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9373 IEM_MC_ARG(uint16_t, u16Src, 1);
9374 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9376
9377 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9379 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9380 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9381 IEM_MC_REF_EFLAGS(pEFlags);
9382 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9383
9384 IEM_MC_ADVANCE_RIP();
9385 IEM_MC_END();
9386 break;
9387
9388 case IEMMODE_32BIT:
9389 IEM_MC_BEGIN(3, 1);
9390 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9391 IEM_MC_ARG(uint32_t, u32Src, 1);
9392 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9394
9395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9397 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9398 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9399 IEM_MC_REF_EFLAGS(pEFlags);
9400 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9401
9402 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9403 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9404 IEM_MC_ENDIF();
9405 IEM_MC_ADVANCE_RIP();
9406 IEM_MC_END();
9407 break;
9408
9409 case IEMMODE_64BIT:
9410 IEM_MC_BEGIN(3, 1);
9411 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9412 IEM_MC_ARG(uint64_t, u64Src, 1);
9413 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9415
9416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9418 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9419 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9420 IEM_MC_REF_EFLAGS(pEFlags);
9421 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9422
9423 IEM_MC_ADVANCE_RIP();
9424 IEM_MC_END();
9425 break;
9426 }
9427 }
9428 return VINF_SUCCESS;
9429}
9430
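/* Informal illustration of the ZF-dependent write-back above: with a zero
   source, 'bsf eax, ebx' sets ZF and leaves the destination unwritten, which
   is why IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF only runs when ZF is clear.  With
   EBX=0x10 the scan finds bit 4, clears ZF and writes EAX=4 (zero-extending
   RAX as usual for 32-bit destinations). */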
9431
9432/** Opcode 0x0f 0xbc. */
9433FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
9434{
9435 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
9436 IEMOP_HLP_MIN_386();
9437 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
9438 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
9439}
9440
9441
9442/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
9443FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
9444{
9445 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
9446 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
9447 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
9448
9449#ifndef TST_IEM_CHECK_MC
9450 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
9451 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
9452 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
9453 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
9454 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
9455 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
9456 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
9457 {
9458 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
9459 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
9460 };
9461#endif
9462 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
9463 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
9464 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
9465}
9466
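/* The s_iemAImpl_tzcnt_eflags[2][4] table above is indexed first by whether
   the host CPU has BMI1 (native workers are only usable when it does) and
   then by the configured target-CPU EFLAGS behaviour; the exact column
   assignment is assumed to match IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX.
   Semantics reminder: 'tzcnt eax, ebx' with EBX=0 yields EAX=32 and CF=1,
   whereas the BSF fallback would set ZF and leave EAX untouched. */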
9467
9468/** Opcode 0x0f 0xbd. */
9469FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
9470{
9471 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
9472 IEMOP_HLP_MIN_386();
9473 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
9474 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
9475}
9476
9477
9478/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
9479FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
9480{
9481 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
9482 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
9483 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
9484
9485#ifndef TST_IEM_CHECK_MC
9486 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
9487 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
9488 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
9489 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
9490 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
9491 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
9492 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
9493 {
9494 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
9495 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
9496 };
9497#endif
9498 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
9499 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
9500 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
9501}
9502
9503
9504
9505/** Opcode 0x0f 0xbe. */
9506FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
9507{
9508 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
9509 IEMOP_HLP_MIN_386();
9510
9511 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9512
9513 /*
9514 * If rm is denoting a register, no more instruction bytes.
9515 */
9516 if (IEM_IS_MODRM_REG_MODE(bRm))
9517 {
9518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9519 switch (pVCpu->iem.s.enmEffOpSize)
9520 {
9521 case IEMMODE_16BIT:
9522 IEM_MC_BEGIN(0, 1);
9523 IEM_MC_LOCAL(uint16_t, u16Value);
9524 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9525 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
9526 IEM_MC_ADVANCE_RIP();
9527 IEM_MC_END();
9528 return VINF_SUCCESS;
9529
9530 case IEMMODE_32BIT:
9531 IEM_MC_BEGIN(0, 1);
9532 IEM_MC_LOCAL(uint32_t, u32Value);
9533 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9534 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9535 IEM_MC_ADVANCE_RIP();
9536 IEM_MC_END();
9537 return VINF_SUCCESS;
9538
9539 case IEMMODE_64BIT:
9540 IEM_MC_BEGIN(0, 1);
9541 IEM_MC_LOCAL(uint64_t, u64Value);
9542 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9543 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9544 IEM_MC_ADVANCE_RIP();
9545 IEM_MC_END();
9546 return VINF_SUCCESS;
9547
9548 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9549 }
9550 }
9551 else
9552 {
9553 /*
9554 * We're loading a register from memory.
9555 */
9556 switch (pVCpu->iem.s.enmEffOpSize)
9557 {
9558 case IEMMODE_16BIT:
9559 IEM_MC_BEGIN(0, 2);
9560 IEM_MC_LOCAL(uint16_t, u16Value);
9561 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9564 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9565 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
9566 IEM_MC_ADVANCE_RIP();
9567 IEM_MC_END();
9568 return VINF_SUCCESS;
9569
9570 case IEMMODE_32BIT:
9571 IEM_MC_BEGIN(0, 2);
9572 IEM_MC_LOCAL(uint32_t, u32Value);
9573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9574 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9576 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9577 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9578 IEM_MC_ADVANCE_RIP();
9579 IEM_MC_END();
9580 return VINF_SUCCESS;
9581
9582 case IEMMODE_64BIT:
9583 IEM_MC_BEGIN(0, 2);
9584 IEM_MC_LOCAL(uint64_t, u64Value);
9585 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9588 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9589 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9590 IEM_MC_ADVANCE_RIP();
9591 IEM_MC_END();
9592 return VINF_SUCCESS;
9593
9594 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9595 }
9596 }
9597}
9598
9599
9600/** Opcode 0x0f 0xbf. */
9601FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
9602{
9603 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
9604 IEMOP_HLP_MIN_386();
9605
9606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9607
9608 /** @todo Not entirely sure how the operand size prefix is handled here,
9609 * assuming that it will be ignored. Would be nice to have a few
9610 * tests for this. */
9611 /*
9612 * If rm is denoting a register, no more instruction bytes.
9613 */
9614 if (IEM_IS_MODRM_REG_MODE(bRm))
9615 {
9616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9617 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
9618 {
9619 IEM_MC_BEGIN(0, 1);
9620 IEM_MC_LOCAL(uint32_t, u32Value);
9621 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9622 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9623 IEM_MC_ADVANCE_RIP();
9624 IEM_MC_END();
9625 }
9626 else
9627 {
9628 IEM_MC_BEGIN(0, 1);
9629 IEM_MC_LOCAL(uint64_t, u64Value);
9630 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9631 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9632 IEM_MC_ADVANCE_RIP();
9633 IEM_MC_END();
9634 }
9635 }
9636 else
9637 {
9638 /*
9639 * We're loading a register from memory.
9640 */
9641 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
9642 {
9643 IEM_MC_BEGIN(0, 2);
9644 IEM_MC_LOCAL(uint32_t, u32Value);
9645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9646 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9648 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9649 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9650 IEM_MC_ADVANCE_RIP();
9651 IEM_MC_END();
9652 }
9653 else
9654 {
9655 IEM_MC_BEGIN(0, 2);
9656 IEM_MC_LOCAL(uint64_t, u64Value);
9657 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9660 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9661 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9662 IEM_MC_ADVANCE_RIP();
9663 IEM_MC_END();
9664 }
9665 }
9666 return VINF_SUCCESS;
9667}
9668
9669
9670/** Opcode 0x0f 0xc0. */
9671FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
9672{
9673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9674 IEMOP_HLP_MIN_486();
9675 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
9676
9677 /*
9678 * If rm is denoting a register, no more instruction bytes.
9679 */
9680 if (IEM_IS_MODRM_REG_MODE(bRm))
9681 {
9682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9683
9684 IEM_MC_BEGIN(3, 0);
9685 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9686 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
9687 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9688
9689 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9690 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9691 IEM_MC_REF_EFLAGS(pEFlags);
9692 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
9693
9694 IEM_MC_ADVANCE_RIP();
9695 IEM_MC_END();
9696 }
9697 else
9698 {
9699 /*
9700 * We're accessing memory.
9701 */
9702 IEM_MC_BEGIN(3, 3);
9703 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9704 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
9705 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9706 IEM_MC_LOCAL(uint8_t, u8RegCopy);
9707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9708
9709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9710 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9711 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9712 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
9713 IEM_MC_FETCH_EFLAGS(EFlags);
9714 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9715 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
9716 else
9717 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
9718
9719 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9720 IEM_MC_COMMIT_EFLAGS(EFlags);
9721 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
9722 IEM_MC_ADVANCE_RIP();
9723 IEM_MC_END();
9724 return VINF_SUCCESS;
9725 }
9726 return VINF_SUCCESS;
9727}
9728
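/* XADD semantics implemented above, informally: TEMP = DST; DST = DST + SRC;
   SRC = TEMP, with the arithmetic flags updated as for ADD.  This is why the
   memory form keeps a local copy of the register operand (u8RegCopy) and only
   stores it back into the ModRM 'reg' register once the memory operand has
   been committed. */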
9729
9730/** Opcode 0x0f 0xc1. */
9731FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
9732{
9733 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
9734 IEMOP_HLP_MIN_486();
9735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9736
9737 /*
9738 * If rm is denoting a register, no more instruction bytes.
9739 */
9740 if (IEM_IS_MODRM_REG_MODE(bRm))
9741 {
9742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9743
9744 switch (pVCpu->iem.s.enmEffOpSize)
9745 {
9746 case IEMMODE_16BIT:
9747 IEM_MC_BEGIN(3, 0);
9748 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9749 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
9750 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9751
9752 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9753 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9754 IEM_MC_REF_EFLAGS(pEFlags);
9755 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
9756
9757 IEM_MC_ADVANCE_RIP();
9758 IEM_MC_END();
9759 return VINF_SUCCESS;
9760
9761 case IEMMODE_32BIT:
9762 IEM_MC_BEGIN(3, 0);
9763 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9764 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
9765 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9766
9767 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9768 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9769 IEM_MC_REF_EFLAGS(pEFlags);
9770 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
9771
9772 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9773 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
9774 IEM_MC_ADVANCE_RIP();
9775 IEM_MC_END();
9776 return VINF_SUCCESS;
9777
9778 case IEMMODE_64BIT:
9779 IEM_MC_BEGIN(3, 0);
9780 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9781 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
9782 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9783
9784 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9785 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9786 IEM_MC_REF_EFLAGS(pEFlags);
9787 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
9788
9789 IEM_MC_ADVANCE_RIP();
9790 IEM_MC_END();
9791 return VINF_SUCCESS;
9792
9793 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9794 }
9795 }
9796 else
9797 {
9798 /*
9799 * We're accessing memory.
9800 */
9801 switch (pVCpu->iem.s.enmEffOpSize)
9802 {
9803 case IEMMODE_16BIT:
9804 IEM_MC_BEGIN(3, 3);
9805 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9806 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
9807 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9808 IEM_MC_LOCAL(uint16_t, u16RegCopy);
9809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9810
9811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9812 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9813 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9814 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
9815 IEM_MC_FETCH_EFLAGS(EFlags);
9816 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9817 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
9818 else
9819 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
9820
9821 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9822 IEM_MC_COMMIT_EFLAGS(EFlags);
9823 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
9824 IEM_MC_ADVANCE_RIP();
9825 IEM_MC_END();
9826 return VINF_SUCCESS;
9827
9828 case IEMMODE_32BIT:
9829 IEM_MC_BEGIN(3, 3);
9830 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9831 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
9832 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9833 IEM_MC_LOCAL(uint32_t, u32RegCopy);
9834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9835
9836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9837 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9838 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9839 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
9840 IEM_MC_FETCH_EFLAGS(EFlags);
9841 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9842 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
9843 else
9844 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
9845
9846 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9847 IEM_MC_COMMIT_EFLAGS(EFlags);
9848 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
9849 IEM_MC_ADVANCE_RIP();
9850 IEM_MC_END();
9851 return VINF_SUCCESS;
9852
9853 case IEMMODE_64BIT:
9854 IEM_MC_BEGIN(3, 3);
9855 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9856 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
9857 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9858 IEM_MC_LOCAL(uint64_t, u64RegCopy);
9859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9860
9861 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9862 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9863 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9864 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
9865 IEM_MC_FETCH_EFLAGS(EFlags);
9866 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9867 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
9868 else
9869 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
9870
9871 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9872 IEM_MC_COMMIT_EFLAGS(EFlags);
9873 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
9874 IEM_MC_ADVANCE_RIP();
9875 IEM_MC_END();
9876 return VINF_SUCCESS;
9877
9878 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9879 }
9880 }
9881}
9882
9883
9884/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
9885FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
9886/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
9887FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
9888/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
9889FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
9890/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
9891FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
9892
9893
9894/** Opcode 0x0f 0xc3. */
9895FNIEMOP_DEF(iemOp_movnti_My_Gy)
9896{
9897 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
9898
9899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9900
9901 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
9902 if (IEM_IS_MODRM_MEM_MODE(bRm))
9903 {
9904 switch (pVCpu->iem.s.enmEffOpSize)
9905 {
9906 case IEMMODE_32BIT:
9907 IEM_MC_BEGIN(0, 2);
9908 IEM_MC_LOCAL(uint32_t, u32Value);
9909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9910
9911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9913 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9914 return IEMOP_RAISE_INVALID_OPCODE();
9915
9916 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
9917 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
9918 IEM_MC_ADVANCE_RIP();
9919 IEM_MC_END();
9920 break;
9921
9922 case IEMMODE_64BIT:
9923 IEM_MC_BEGIN(0, 2);
9924 IEM_MC_LOCAL(uint64_t, u64Value);
9925 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9926
9927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9929 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9930 return IEMOP_RAISE_INVALID_OPCODE();
9931
9932 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
9933 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
9934 IEM_MC_ADVANCE_RIP();
9935 IEM_MC_END();
9936 break;
9937
9938 case IEMMODE_16BIT:
9939 /** @todo check this form. */
9940 return IEMOP_RAISE_INVALID_OPCODE();
9941 }
9942 }
9943 else
9944 return IEMOP_RAISE_INVALID_OPCODE();
9945 return VINF_SUCCESS;
9946}
9947
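/* Note: MOVNTI's non-temporal hint only affects caching on real hardware; the
   store above is an ordinary IEM_MC_STORE_MEM, so the hint is effectively
   ignored by the interpreter.  The SSE2 feature check is performed only after
   the effective address and prefixes are fully decoded, presumably so the
   instruction length is established before raising #UD. */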
9948
9949/* Opcode 0x66 0x0f 0xc3 - invalid */
9950/* Opcode 0xf3 0x0f 0xc3 - invalid */
9951/* Opcode 0xf2 0x0f 0xc3 - invalid */
9952
9953
9954/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
9955FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
9956{
9957 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
9958 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9959 if (IEM_IS_MODRM_REG_MODE(bRm))
9960 {
9961 /*
9962 * Register, register.
9963 */
9964 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
9965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9966 IEM_MC_BEGIN(3, 0);
9967 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9968 IEM_MC_ARG(uint16_t, u16Src, 1);
9969 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
9970 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
9971 IEM_MC_PREPARE_FPU_USAGE();
9972 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9973 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
9974 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
9975 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
9976 IEM_MC_FPU_TO_MMX_MODE();
9977 IEM_MC_ADVANCE_RIP();
9978 IEM_MC_END();
9979 }
9980 else
9981 {
9982 /*
9983 * Register, memory.
9984 */
9985 IEM_MC_BEGIN(3, 2);
9986 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9987 IEM_MC_ARG(uint16_t, u16Src, 1);
9988 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9989
9990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9991 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
9992 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
9993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9994 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
9995 IEM_MC_PREPARE_FPU_USAGE();
9996
9997 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9998 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9999 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bEvilArg);
10000 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
10001 IEM_MC_FPU_TO_MMX_MODE();
10002 IEM_MC_ADVANCE_RIP();
10003 IEM_MC_END();
10004 }
10005 return VINF_SUCCESS;
10006}
10007
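/* Informal example of the MMX form above: 'pinsrw mm0, eax, 2' replaces word 2
   of MM0 with AX.  The immediate is passed through unmasked here; the
   iemAImpl_pinsrw_u64 worker is assumed to reduce it modulo 4 (the number of
   words in a 64-bit register), matching hardware, which ignores the upper
   imm8 bits. */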
10008
10009/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
10010FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
10011{
10012 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10014 if (IEM_IS_MODRM_REG_MODE(bRm))
10015 {
10016 /*
10017 * Register, register.
10018 */
10019 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
10020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10021 IEM_MC_BEGIN(3, 0);
10022 IEM_MC_ARG(PRTUINT128U, puDst, 0);
10023 IEM_MC_ARG(uint16_t, u16Src, 1);
10024 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
10025 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10026 IEM_MC_PREPARE_SSE_USAGE();
10027 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
10028 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
10029 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
10030 IEM_MC_ADVANCE_RIP();
10031 IEM_MC_END();
10032 }
10033 else
10034 {
10035 /*
10036 * Register, memory.
10037 */
10038 IEM_MC_BEGIN(3, 2);
10039 IEM_MC_ARG(PRTUINT128U, puDst, 0);
10040 IEM_MC_ARG(uint16_t, u16Src, 1);
10041 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10042
10043 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10044 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
10045 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
10046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10047 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10048 IEM_MC_PREPARE_SSE_USAGE();
10049
10050 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10051 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
10052 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bEvilArg);
10053 IEM_MC_ADVANCE_RIP();
10054 IEM_MC_END();
10055 }
10056 return VINF_SUCCESS;
10057}
10058
10059
10060/* Opcode 0xf3 0x0f 0xc4 - invalid */
10061/* Opcode 0xf2 0x0f 0xc4 - invalid */
10062
10063/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
10064FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
10065/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
10066FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
10067/* Opcode 0xf3 0x0f 0xc5 - invalid */
10068/* Opcode 0xf2 0x0f 0xc5 - invalid */
10069
10070/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
10071FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
10072{
10073 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10075 if (IEM_IS_MODRM_REG_MODE(bRm))
10076 {
10077 /*
10078 * Register, register.
10079 */
10080 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
10081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10082 IEM_MC_BEGIN(3, 0);
10083 IEM_MC_ARG(PRTUINT128U, pDst, 0);
10084 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
10085 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
10086 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
10087 IEM_MC_PREPARE_SSE_USAGE();
10088 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
10089 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
10090 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
10091 IEM_MC_ADVANCE_RIP();
10092 IEM_MC_END();
10093 }
10094 else
10095 {
10096 /*
10097 * Register, memory.
10098 */
10099 IEM_MC_BEGIN(3, 2);
10100 IEM_MC_ARG(PRTUINT128U, pDst, 0);
10101 IEM_MC_LOCAL(RTUINT128U, uSrc);
10102 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
10103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10104
10105 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10106 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
10107 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
10108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10109 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
10110 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10111
10112 IEM_MC_PREPARE_SSE_USAGE();
10113 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
10114 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
10115
10116 IEM_MC_ADVANCE_RIP();
10117 IEM_MC_END();
10118 }
10119 return VINF_SUCCESS;
10120}
10121
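/* SHUFPS selector semantics for reference, with [n] denoting the n-th 32-bit
   lane:
       dst[0] = dst[imm & 3];        dst[1] = dst[(imm >> 2) & 3];
       dst[2] = src[(imm >> 4) & 3]; dst[3] = src[(imm >> 6) & 3];
   So 'shufps xmm0, xmm0, 0x1b' reverses the four lanes of XMM0. */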
10122
10123/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
10124FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
10125{
10126 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10127 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10128 if (IEM_IS_MODRM_REG_MODE(bRm))
10129 {
10130 /*
10131 * Register, register.
10132 */
10133 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
10134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10135 IEM_MC_BEGIN(3, 0);
10136 IEM_MC_ARG(PRTUINT128U, pDst, 0);
10137 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
10138 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
10139 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10140 IEM_MC_PREPARE_SSE_USAGE();
10141 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
10142 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
10143 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
10144 IEM_MC_ADVANCE_RIP();
10145 IEM_MC_END();
10146 }
10147 else
10148 {
10149 /*
10150 * Register, memory.
10151 */
10152 IEM_MC_BEGIN(3, 2);
10153 IEM_MC_ARG(PRTUINT128U, pDst, 0);
10154 IEM_MC_LOCAL(RTUINT128U, uSrc);
10155 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
10156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10157
10158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10159 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
10160 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
10161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10162 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10163 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10164
10165 IEM_MC_PREPARE_SSE_USAGE();
10166 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
10167 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
10168
10169 IEM_MC_ADVANCE_RIP();
10170 IEM_MC_END();
10171 }
10172 return VINF_SUCCESS;
10173}
10174
10175
10176/* Opcode 0xf3 0x0f 0xc6 - invalid */
10177/* Opcode 0xf2 0x0f 0xc6 - invalid */
10178
10179
10180/** Opcode 0x0f 0xc7 !11/1. */
10181FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
10182{
10183 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
10184
10185 IEM_MC_BEGIN(4, 3);
10186 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
10187 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
10188 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
10189 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
10190 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
10191 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
10192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10193
10194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10195 IEMOP_HLP_DONE_DECODING();
10196 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10197
10198 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
10199 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
10200 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
10201
10202 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
10203 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
10204 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
10205
10206 IEM_MC_FETCH_EFLAGS(EFlags);
10207 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10208 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
10209 else
10210 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
10211
10212 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
10213 IEM_MC_COMMIT_EFLAGS(EFlags);
10214 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10215 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
10216 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
10217 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
10218 IEM_MC_ENDIF();
10219 IEM_MC_ADVANCE_RIP();
10220
10221 IEM_MC_END();
10222 return VINF_SUCCESS;
10223}
10224
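/* CMPXCHG8B semantics implemented above, informally: compare EDX:EAX with the
   64-bit memory operand; if equal, set ZF and store ECX:EBX to memory,
   otherwise clear ZF and load the memory value into EDX:EAX, hence the
   IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) write-back of u64EaxEdx above. */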
10225
10226/** Opcode REX.W 0x0f 0xc7 !11/1. */
10227FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
10228{
10229 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
10230 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
10231 {
10232#if 0
10233 RT_NOREF(bRm);
10234 IEMOP_BITCH_ABOUT_STUB();
10235 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
10236#else
10237 IEM_MC_BEGIN(4, 3);
10238 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
10239 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
10240 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
10241 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
10242 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
10243 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
10244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10245
10246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10247 IEMOP_HLP_DONE_DECODING();
10248 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
10249 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10250
10251 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
10252 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
10253 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
10254
10255 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
10256 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
10257 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
10258
10259 IEM_MC_FETCH_EFLAGS(EFlags);
10260# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
10261# if defined(RT_ARCH_AMD64)
10262 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
10263# endif
10264 {
10265 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10266 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
10267 else
10268 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
10269 }
10270# if defined(RT_ARCH_AMD64)
10271 else
10272# endif
10273# endif
10274# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
10275 {
10276 /* Note! The fallback for 32-bit systems and systems without CX16 uses multiple
10277 accesses that are not atomic as a whole, which works fine in a uni-CPU guest
10278 configuration (ignoring DMA). If guest SMP is active we have no choice
10279 but to use a rendezvous callback here. Sigh. */
10280 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
10281 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
10282 else
10283 {
10284 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
10285 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
10286 }
10287 }
10288# endif
10289
10290 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
10291 IEM_MC_COMMIT_EFLAGS(EFlags);
10292 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
10293 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
10294 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
10295 IEM_MC_ENDIF();
10296 IEM_MC_ADVANCE_RIP();
10297
10298 IEM_MC_END();
10299 return VINF_SUCCESS;
10300#endif
10301 }
10302 Log(("cmpxchg16b -> #UD\n"));
10303 return IEMOP_RAISE_INVALID_OPCODE();
10304}
10305
10306FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
10307{
10308 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
10309 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
10310 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
10311}
10312
10313/** Opcode 0x0f 0xc7 11/6. */
10314FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
10315
10316/** Opcode 0x0f 0xc7 !11/6. */
10317#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
10318FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
10319{
10320 IEMOP_MNEMONIC(vmptrld, "vmptrld");
10321 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
10322 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
10323 IEM_MC_BEGIN(2, 0);
10324 IEM_MC_ARG(uint8_t, iEffSeg, 0);
10325 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
10326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10327 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
10328 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
10329 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
10330 IEM_MC_END();
10331 return VINF_SUCCESS;
10332}
10333#else
10334FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
10335#endif
10336
10337/** Opcode 0x66 0x0f 0xc7 !11/6. */
10338#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
10339FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
10340{
10341 IEMOP_MNEMONIC(vmclear, "vmclear");
10342 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
10343 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
10344 IEM_MC_BEGIN(2, 0);
10345 IEM_MC_ARG(uint8_t, iEffSeg, 0);
10346 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
10347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10348 IEMOP_HLP_DONE_DECODING();
10349 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
10350 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
10351 IEM_MC_END();
10352 return VINF_SUCCESS;
10353}
10354#else
10355FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
10356#endif
10357
10358/** Opcode 0xf3 0x0f 0xc7 !11/6. */
10359#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
10360FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
10361{
10362 IEMOP_MNEMONIC(vmxon, "vmxon");
10363 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
10364 IEM_MC_BEGIN(2, 0);
10365 IEM_MC_ARG(uint8_t, iEffSeg, 0);
10366 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
10367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10368 IEMOP_HLP_DONE_DECODING();
10369 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
10370 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
10371 IEM_MC_END();
10372 return VINF_SUCCESS;
10373}
10374#else
10375FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
10376#endif
10377
10378/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
10379#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
10380FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
10381{
10382 IEMOP_MNEMONIC(vmptrst, "vmptrst");
10383 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
10384 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
10385 IEM_MC_BEGIN(2, 0);
10386 IEM_MC_ARG(uint8_t, iEffSeg, 0);
10387 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
10388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10389 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
10390 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
10391 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
10392 IEM_MC_END();
10393 return VINF_SUCCESS;
10394}
10395#else
10396FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
10397#endif
10398
10399/** Opcode 0x0f 0xc7 11/7. */
10400FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
10401
10402
10403/**
10404 * Group 9 jump table for register variant.
10405 */
10406IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
10407{ /* pfx: none, 066h, 0f3h, 0f2h */
10408 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
10409 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
10410 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
10411 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
10412 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10413 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
10414 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10415 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10416};
10417AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
10418
10419
10420/**
10421 * Group 9 jump table for memory variant.
10422 */
10423IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
10424{ /* pfx: none, 066h, 0f3h, 0f2h */
10425 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
10426 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
10427 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
10428 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
10429 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10430 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
10431 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
10432 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10433};
10434AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
10435
10436
10437/** Opcode 0x0f 0xc7. */
10438FNIEMOP_DEF(iemOp_Grp9)
10439{
10440 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
10441 if (IEM_IS_MODRM_REG_MODE(bRm))
10442 /* register, register */
10443 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10444 + pVCpu->iem.s.idxPrefix], bRm);
10445 /* memory, register */
10446 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10447 + pVCpu->iem.s.idxPrefix], bRm);
10448}
10449
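/* The two tables above are laid out as eight ModRM.reg rows of four prefix
   columns (none, 066h, 0f3h, 0f2h), so the dispatch computes
   reg * 4 + idxPrefix.  For example, 'f3 0f c7 /6' with a memory operand
   lands on iemOp_Grp9_vmxon_Mq, while the same encoding with a register
   operand is invalid. */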
10450
10451/**
10452 * Common 'bswap register' helper.
10453 */
10454FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
10455{
10456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10457 switch (pVCpu->iem.s.enmEffOpSize)
10458 {
10459 case IEMMODE_16BIT:
10460 IEM_MC_BEGIN(1, 0);
10461 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10462 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
10463 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
10464 IEM_MC_ADVANCE_RIP();
10465 IEM_MC_END();
10466 return VINF_SUCCESS;
10467
10468 case IEMMODE_32BIT:
10469 IEM_MC_BEGIN(1, 0);
10470 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10471 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
10472 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10473 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
10474 IEM_MC_ADVANCE_RIP();
10475 IEM_MC_END();
10476 return VINF_SUCCESS;
10477
10478 case IEMMODE_64BIT:
10479 IEM_MC_BEGIN(1, 0);
10480 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10481 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
10482 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
10483 IEM_MC_ADVANCE_RIP();
10484 IEM_MC_END();
10485 return VINF_SUCCESS;
10486
10487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10488 }
10489}
10490
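/* Note: BSWAP with a 16-bit operand size is undefined by the manuals.  The
   16-bit case above deliberately passes a 32-bit reference to
   iemAImpl_bswap_u16, which is assumed to model one observed hardware
   behaviour (commonly zeroing the low word) rather than swapping bytes.
   The 32-bit case clears the high dword up front, per the usual 64-bit mode
   rule for 32-bit destinations. */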
10491
10492/** Opcode 0x0f 0xc8. */
10493FNIEMOP_DEF(iemOp_bswap_rAX_r8)
10494{
10495 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
10496 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
10497 prefix. REX.B appears to be the correct prefix, however. For a parallel
10498 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
10499 IEMOP_HLP_MIN_486();
10500 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
10501}
10502
10503
10504/** Opcode 0x0f 0xc9. */
10505FNIEMOP_DEF(iemOp_bswap_rCX_r9)
10506{
10507 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
10508 IEMOP_HLP_MIN_486();
10509 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
10510}
10511
10512
10513/** Opcode 0x0f 0xca. */
10514FNIEMOP_DEF(iemOp_bswap_rDX_r10)
10515{
10516 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
10517 IEMOP_HLP_MIN_486();
10518 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
10519}
10520
10521
10522/** Opcode 0x0f 0xcb. */
10523FNIEMOP_DEF(iemOp_bswap_rBX_r11)
10524{
10525 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
10526 IEMOP_HLP_MIN_486();
10527 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
10528}
10529
10530
10531/** Opcode 0x0f 0xcc. */
10532FNIEMOP_DEF(iemOp_bswap_rSP_r12)
10533{
10534 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
10535 IEMOP_HLP_MIN_486();
10536 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
10537}
10538
10539
10540/** Opcode 0x0f 0xcd. */
10541FNIEMOP_DEF(iemOp_bswap_rBP_r13)
10542{
10543 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
10544 IEMOP_HLP_MIN_486();
10545 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
10546}
10547
10548
10549/** Opcode 0x0f 0xce. */
10550FNIEMOP_DEF(iemOp_bswap_rSI_r14)
10551{
10552 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
10553 IEMOP_HLP_MIN_486();
10554 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
10555}
10556
10557
10558/** Opcode 0x0f 0xcf. */
10559FNIEMOP_DEF(iemOp_bswap_rDI_r15)
10560{
10561 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
10562 IEMOP_HLP_MIN_486();
10563 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
10564}
10565
10566
10567/* Opcode 0x0f 0xd0 - invalid */
10568
10569
10570/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
10571FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
10572{
10573 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
10574 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
10575}
10576
10577
10578/* Opcode 0xf3 0x0f 0xd0 - invalid */
10579
10580
10581/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
10582FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
10583{
10584 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
10585 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
10586}
10587
10588
10589
10590/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
10591FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
10592{
10593 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10594 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
10595}
10596
10597/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
10598FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
10599{
10600 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10601 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
10602}
10603
10604/* Opcode 0xf3 0x0f 0xd1 - invalid */
10605/* Opcode 0xf2 0x0f 0xd1 - invalid */
10606
10607/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
10608FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
10609{
10610 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10611 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
10612}
10613
10614
10615/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
10616FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
10617{
10618 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10619 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
10620}
10621
10622
10623/* Opcode 0xf3 0x0f 0xd2 - invalid */
10624/* Opcode 0xf2 0x0f 0xd2 - invalid */
10625
10626/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
10627FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
10628{
10629 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10630 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
10631}
10632
10633
10634/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
10635FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
10636{
10637 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10638 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
10639}
10640
10641
10642/* Opcode 0xf3 0x0f 0xd3 - invalid */
10643/* Opcode 0xf2 0x0f 0xd3 - invalid */
10644
10645
10646/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
10647FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
10648{
10649 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10650 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
10651}
10652
10653
10654/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
10655FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
10656{
10657 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10658 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
10659}
10660
10661
10662/* Opcode 0xf3 0x0f 0xd4 - invalid */
10663/* Opcode 0xf2 0x0f 0xd4 - invalid */
10664
10665/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
10666FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
10667{
10668 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10669 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
10670}
10671
10672/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
10673FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
10674{
10675 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10676 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
10677}
10678
10679
10680/* Opcode 0xf3 0x0f 0xd5 - invalid */
10681/* Opcode 0xf2 0x0f 0xd5 - invalid */
10682
10683/* Opcode 0x0f 0xd6 - invalid */
10684
10685/**
10686 * @opcode 0xd6
10687 * @oppfx 0x66
10688 * @opcpuid sse2
10689 * @opgroup og_sse2_pcksclr_datamove
10690 * @opxcpttype none
10691 * @optest op1=-1 op2=2 -> op1=2
10692 * @optest op1=0 op2=-42 -> op1=-42
10693 */
10694FNIEMOP_DEF(iemOp_movq_Wq_Vq)
10695{
10696 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10697 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10698 if (IEM_IS_MODRM_REG_MODE(bRm))
10699 {
10700 /*
10701 * Register, register.
10702 */
10703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10704 IEM_MC_BEGIN(0, 2);
10705 IEM_MC_LOCAL(uint64_t, uSrc);
10706
10707 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10708 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
10709
10710 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10711 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
10712
10713 IEM_MC_ADVANCE_RIP();
10714 IEM_MC_END();
10715 }
10716 else
10717 {
10718 /*
10719 * Memory, register.
10720 */
10721 IEM_MC_BEGIN(0, 2);
10722 IEM_MC_LOCAL(uint64_t, uSrc);
10723 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10724
10725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10727 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10728 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10729
10730 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10731 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10732
10733 IEM_MC_ADVANCE_RIP();
10734 IEM_MC_END();
10735 }
10736 return VINF_SUCCESS;
10737}
10738
10739
10740/**
10741 * @opcode 0xd6
10742 * @opcodesub 11 mr/reg
10743 * @oppfx f3
10744 * @opcpuid sse2
10745 * @opgroup og_sse2_simdint_datamove
10746 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
10747 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10748 */
10749FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
10750{
10751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10752 if (IEM_IS_MODRM_REG_MODE(bRm))
10753 {
10754 /*
10755 * Register, register.
10756 */
10757 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10759 IEM_MC_BEGIN(0, 1);
10760 IEM_MC_LOCAL(uint64_t, uSrc);
10761
10762 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10763 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10764
10765 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
10766 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
10767 IEM_MC_FPU_TO_MMX_MODE();
10768
10769 IEM_MC_ADVANCE_RIP();
10770 IEM_MC_END();
10771 return VINF_SUCCESS;
10772 }
10773
10774 /**
10775 * @opdone
10776 * @opmnemonic udf30fd6mem
10777 * @opcode 0xd6
10778 * @opcodesub !11 mr/reg
10779 * @oppfx f3
10780 * @opunused intel-modrm
10781 * @opcpuid sse
10782 * @optest ->
10783 */
10784 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
10785}
10786
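/* Both MOVQ2DQ (above) and MOVDQ2Q (below) exist only in the register form;
   the memory encodings are invalid and get routed to
   iemOp_InvalidWithRMNeedDecode.  Like the other MMX-touching instructions
   here, the register forms call IEM_MC_FPU_TO_MMX_MODE(), which matches the
   ftw=0xff expectations in the @optest annotations. */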
10787
10788/**
10789 * @opcode 0xd6
10790 * @opcodesub 11 mr/reg
10791 * @oppfx f2
10792 * @opcpuid sse2
10793 * @opgroup og_sse2_simdint_datamove
10794 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
10795 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10796 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
10797 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
10798 * @optest op1=-42 op2=0xfedcba9876543210
10799 * -> op1=0xfedcba9876543210 ftw=0xff
10800 */
10801FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
10802{
10803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10804 if (IEM_IS_MODRM_REG_MODE(bRm))
10805 {
10806 /*
10807 * Register, register.
10808 */
10809 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10811 IEM_MC_BEGIN(0, 1);
10812 IEM_MC_LOCAL(uint64_t, uSrc);
10813
10814 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10815 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10816
10817 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
10818 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
10819 IEM_MC_FPU_TO_MMX_MODE();
10820
10821 IEM_MC_ADVANCE_RIP();
10822 IEM_MC_END();
10823 return VINF_SUCCESS;
10824 }
10825
10826 /**
10827 * @opdone
10828 * @opmnemonic udf20fd6mem
10829 * @opcode 0xd6
10830 * @opcodesub !11 mr/reg
10831 * @oppfx f2
10832 * @opunused intel-modrm
10833 * @opcpuid sse
10834 * @optest ->
10835 */
10836 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
10837}
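/*
 * movq2dq and movdq2q above exist only in register form and move 64 bits
 * between the MMX and XMM register files; the ftw=0xff expectations in the
 * @optest lines reflect the IEM_MC_FPU_TO_MMX_MODE() side effect (all FPU
 * tags marked valid). A minimal sketch with hypothetical flat register
 * arrays:
 *
 *     static void RefMovq2dq(uint64_t aXmm[][2], uint64_t const aMm[], uint8_t iXreg, uint8_t iMreg)
 *     {
 *         aXmm[iXreg][0] = aMm[iMreg];  // low qword from the MMX register
 *         aXmm[iXreg][1] = 0;           // zero-extend the XMM destination
 *     }
 */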
10838
10839
10840/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
10841FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
10842{
10843 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10844 /* Docs say register only. */
10845 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
10846 {
10847 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
10848 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
10849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10850 IEM_MC_BEGIN(2, 0);
10851 IEM_MC_ARG(uint64_t *, puDst, 0);
10852 IEM_MC_ARG(uint64_t const *, puSrc, 1);
10853 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
10854 IEM_MC_PREPARE_FPU_USAGE();
10855 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
10856 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
10857 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
10858 IEM_MC_FPU_TO_MMX_MODE();
10859 IEM_MC_ADVANCE_RIP();
10860 IEM_MC_END();
10861 return VINF_SUCCESS;
10862 }
10863 return IEMOP_RAISE_INVALID_OPCODE();
10864}
10865
10866
10867/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
10868FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
10869{
10870 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10871 /* Docs say register only. */
10872 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
10873 {
10874 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
10875 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
10876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10877 IEM_MC_BEGIN(2, 0);
10878 IEM_MC_ARG(uint64_t *, puDst, 0);
10879 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
10880 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10881 IEM_MC_PREPARE_SSE_USAGE();
10882 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
10883 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
10884 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
10885 IEM_MC_ADVANCE_RIP();
10886 IEM_MC_END();
10887 return VINF_SUCCESS;
10888 }
10889 return IEMOP_RAISE_INVALID_OPCODE();
10890}
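/*
 * Reference behaviour of pmovmskb for both worker widths above: gather the
 * most significant bit of every source byte into the low bits of the
 * destination GPR and clear the rest. Illustrative only, the real work is
 * done by iemAImpl_pmovmskb_u64 and iemAImpl_pmovmskb_u128.
 *
 *     static uint32_t RefPMovMskB(uint8_t const *pabSrc, unsigned cbSrc) // 8 or 16
 *     {
 *         uint32_t fMask = 0;
 *         for (unsigned i = 0; i < cbSrc; i++)
 *             fMask |= (uint32_t)(pabSrc[i] >> 7) << i;
 *         return fMask;
 *     }
 */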
10891
10892
10893/* Opcode 0xf3 0x0f 0xd7 - invalid */
10894/* Opcode 0xf2 0x0f 0xd7 - invalid */
10895
10896
10897/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
10898FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
10899{
10900 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10901 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
10902}
10903
10904
10905/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
10906FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
10907{
10908 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10909 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
10910}
10911
10912
10913/* Opcode 0xf3 0x0f 0xd8 - invalid */
10914/* Opcode 0xf2 0x0f 0xd8 - invalid */
10915
10916/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
10917FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
10918{
10919 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10920 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
10921}
10922
10923
10924/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
10925FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
10926{
10927 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10928 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
10929}
10930
10931
10932/* Opcode 0xf3 0x0f 0xd9 - invalid */
10933/* Opcode 0xf2 0x0f 0xd9 - invalid */
10934
10935/** Opcode 0x0f 0xda - pminub Pq, Qq */
10936FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
10937{
10938 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10939 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
10940}
10941
10942
10943/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
10944FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
10945{
10946 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10947 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
10948}
10949
10950/* Opcode 0xf3 0x0f 0xda - invalid */
10951/* Opcode 0xf2 0x0f 0xda - invalid */
10952
10953/** Opcode 0x0f 0xdb - pand Pq, Qq */
10954FNIEMOP_DEF(iemOp_pand_Pq_Qq)
10955{
10956 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10957 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
10958}
10959
10960
10961/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
10962FNIEMOP_DEF(iemOp_pand_Vx_Wx)
10963{
10964 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10965 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
10966}
10967
10968
10969/* Opcode 0xf3 0x0f 0xdb - invalid */
10970/* Opcode 0xf2 0x0f 0xdb - invalid */
10971
10972/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
10973FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
10974{
10975 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10976 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
10977}
10978
10979
10980/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
10981FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
10982{
10983 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10984 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
10985}
10986
10987
10988/* Opcode 0xf3 0x0f 0xdc - invalid */
10989/* Opcode 0xf2 0x0f 0xdc - invalid */
10990
10991/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
10992FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
10993{
10994 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10995 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
10996}
10997
10998
10999/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
11000FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
11001{
11002 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11003 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
11004}
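/*
 * The psubusb/psubusw and paddusb/paddusw workers above saturate instead
 * of wrapping: results are clamped to the unsigned range of the lane.
 * Byte-lane sketch (illustrative, the SIMD workers apply this per lane):
 *
 *     static uint8_t RefPSubUsB(uint8_t a, uint8_t b)
 *     {
 *         return a > b ? (uint8_t)(a - b) : 0;                // clamp at 0
 *     }
 *     static uint8_t RefPAddUsB(uint8_t a, uint8_t b)
 *     {
 *         unsigned uSum = (unsigned)a + b;
 *         return uSum > 0xff ? (uint8_t)0xff : (uint8_t)uSum; // clamp at 0xff
 *     }
 */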
11005
11006
11007/* Opcode 0xf3 0x0f 0xdd - invalid */
11008/* Opcode 0xf2 0x0f 0xdd - invalid */
11009
11010/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
11011FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
11012{
11013 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11014 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
11015}
11016
11017
11018/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
11019FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
11020{
11021 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11022 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
11023}
11024
11025/* Opcode 0xf3 0x0f 0xde - invalid */
11026/* Opcode 0xf2 0x0f 0xde - invalid */
11027
11028
11029/** Opcode 0x0f 0xdf - pandn Pq, Qq */
11030FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
11031{
11032 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11033 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
11034}
11035
11036
11037/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
11038FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
11039{
11040 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11041 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
11042}
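/*
 * Mind the operand order of pandn above: it is the destination that gets
 * complemented, not the source. Sketch:
 *
 *     static uint64_t RefPAndN(uint64_t uDst, uint64_t uSrc)
 *     {
 *         return ~uDst & uSrc;
 *     }
 */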
11043
11044
11045/* Opcode 0xf3 0x0f 0xdf - invalid */
11046/* Opcode 0xf2 0x0f 0xdf - invalid */
11047
11048/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
11049FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
11050{
11051 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11052 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
11053}
11054
11055
11056/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
11057FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
11058{
11059 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
11060 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
11061}
11062
11063
11064/* Opcode 0xf3 0x0f 0xe0 - invalid */
11065/* Opcode 0xf2 0x0f 0xe0 - invalid */
11066
11067/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
11068FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
11069{
11070 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
11071 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
11072}
11073
11074
11075/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
11076FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
11077{
11078 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
11079 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
11080}
11081
11082
11083/* Opcode 0xf3 0x0f 0xe1 - invalid */
11084/* Opcode 0xf2 0x0f 0xe1 - invalid */
11085
11086/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
11087FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
11088{
11089 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
11090 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
11091}
11092
11093
11094/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
11095FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
11096{
11097 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
11098 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
11099}
11100
11101
11102/* Opcode 0xf3 0x0f 0xe2 - invalid */
11103/* Opcode 0xf2 0x0f 0xe2 - invalid */
11104
11105/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
11106FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
11107{
11108 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11109 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
11110}
11111
11112
11113/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
11114FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
11115{
11116 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
11117 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
11118}
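/*
 * pavgb/pavgw above compute a rounding unsigned average per lane,
 * (a + b + 1) >> 1, with enough intermediate precision that the addition
 * cannot overflow. Byte-lane sketch:
 *
 *     static uint8_t RefPAvgB(uint8_t a, uint8_t b)
 *     {
 *         return (uint8_t)(((unsigned)a + b + 1) >> 1);
 *     }
 */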
11119
11120
11121/* Opcode 0xf3 0x0f 0xe3 - invalid */
11122/* Opcode 0xf2 0x0f 0xe3 - invalid */
11123
11124/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
11125FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
11126{
11127 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11128 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
11129}
11130
11131
11132/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
11133FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
11134{
11135 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11136 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
11137}
11138
11139
11140/* Opcode 0xf3 0x0f 0xe4 - invalid */
11141/* Opcode 0xf2 0x0f 0xe4 - invalid */
11142
11143/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
11144FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
11145{
11146 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11147 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
11148}
11149
11150
11151/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
11152FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
11153{
11154 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11155 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
11156}
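/*
 * pmulhuw/pmulhw above keep only the high 16 bits of a 16x16 bit multiply,
 * unsigned respectively signed. Word-lane sketch:
 *
 *     static uint16_t RefPMulHuW(uint16_t a, uint16_t b)
 *     {
 *         return (uint16_t)(((uint32_t)a * b) >> 16);
 *     }
 *     static uint16_t RefPMulHW(int16_t a, int16_t b)
 *     {
 *         return (uint16_t)(((int32_t)a * b) >> 16);
 *     }
 */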
11157
11158
11159/* Opcode 0xf3 0x0f 0xe5 - invalid */
11160/* Opcode 0xf2 0x0f 0xe5 - invalid */
11161
11162/* Opcode 0x0f 0xe6 - invalid */
11163/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
11164FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
11165/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
11166FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
11167/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
11168FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
11169
11170
11171/**
11172 * @opcode 0xe7
11173 * @opcodesub !11 mr/reg
11174 * @oppfx none
11175 * @opcpuid sse
11176 * @opgroup og_sse1_cachect
11177 * @opxcpttype none
11178 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
11179 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
11180 */
11181FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
11182{
11183 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11185 if (IEM_IS_MODRM_MEM_MODE(bRm))
11186 {
11187 /* Register, memory. */
11188 IEM_MC_BEGIN(0, 2);
11189 IEM_MC_LOCAL(uint64_t, uSrc);
11190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11191
11192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11194 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
11195 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11196
11197 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
11198 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
11199 IEM_MC_FPU_TO_MMX_MODE();
11200
11201 IEM_MC_ADVANCE_RIP();
11202 IEM_MC_END();
11203 return VINF_SUCCESS;
11204 }
11205 /**
11206 * @opdone
11207 * @opmnemonic ud0fe7reg
11208 * @opcode 0xe7
11209 * @opcodesub 11 mr/reg
11210 * @oppfx none
11211 * @opunused immediate
11212 * @opcpuid sse
11213 * @optest ->
11214 */
11215 return IEMOP_RAISE_INVALID_OPCODE();
11216}
11217
11218/**
11219 * @opcode 0xe7
11220 * @opcodesub !11 mr/reg
11221 * @oppfx 0x66
11222 * @opcpuid sse2
11223 * @opgroup og_sse2_cachect
11224 * @opxcpttype 1
11225 * @optest op1=-1 op2=2 -> op1=2
11226 * @optest op1=0 op2=-42 -> op1=-42
11227 */
11228FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
11229{
11230 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
11231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11232 if (IEM_IS_MODRM_MEM_MODE(bRm))
11233 {
11234 /* Register, memory. */
11235 IEM_MC_BEGIN(0, 2);
11236 IEM_MC_LOCAL(RTUINT128U, uSrc);
11237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11238
11239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11241 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11242 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
11243
11244 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
11245 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
11246
11247 IEM_MC_ADVANCE_RIP();
11248 IEM_MC_END();
11249 return VINF_SUCCESS;
11250 }
11251
11252 /**
11253 * @opdone
11254 * @opmnemonic ud660fe7reg
11255 * @opcode 0xe7
11256 * @opcodesub 11 mr/reg
11257 * @oppfx 0x66
11258 * @opunused immediate
11259 * @opcpuid sse
11260 * @optest ->
11261 */
11262 return IEMOP_RAISE_INVALID_OPCODE();
11263}
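/*
 * Unlike movntq, movntdq requires a 16 byte aligned memory operand, which
 * is what IEM_MC_STORE_MEM_U128_ALIGN_SSE enforces above; the non-temporal
 * aspect is merely a cache hint and needs no emulation. Alignment sketch:
 *
 *     static bool RefIsMovntdqAligned(uint64_t GCPtrEff)
 *     {
 *         return (GCPtrEff & 15) == 0; // otherwise #GP(0)
 *     }
 */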
11264
11265/* Opcode 0xf3 0x0f 0xe7 - invalid */
11266/* Opcode 0xf2 0x0f 0xe7 - invalid */
11267
11268
11269/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
11270FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
11271{
11272 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11273 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
11274}
11275
11276
11277/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
11278FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
11279{
11280 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11281 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
11282}
11283
11284
11285/* Opcode 0xf3 0x0f 0xe8 - invalid */
11286/* Opcode 0xf2 0x0f 0xe8 - invalid */
11287
11288/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
11289FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
11290{
11291 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11292 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
11293}
11294
11295
11296/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
11297FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
11298{
11299 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11300 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
11301}
11302
11303
11304/* Opcode 0xf3 0x0f 0xe9 - invalid */
11305/* Opcode 0xf2 0x0f 0xe9 - invalid */
11306
11307
11308/** Opcode 0x0f 0xea - pminsw Pq, Qq */
11309FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
11310{
11311 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11312 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
11313}
11314
11315
11316/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
11317FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
11318{
11319 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11320 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
11321}
11322
11323
11324/* Opcode 0xf3 0x0f 0xea - invalid */
11325/* Opcode 0xf2 0x0f 0xea - invalid */
11326
11327
11328/** Opcode 0x0f 0xeb - por Pq, Qq */
11329FNIEMOP_DEF(iemOp_por_Pq_Qq)
11330{
11331 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11332 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
11333}
11334
11335
11336/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
11337FNIEMOP_DEF(iemOp_por_Vx_Wx)
11338{
11339 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11340 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
11341}
11342
11343
11344/* Opcode 0xf3 0x0f 0xeb - invalid */
11345/* Opcode 0xf2 0x0f 0xeb - invalid */
11346
11347/** Opcode 0x0f 0xec - paddsb Pq, Qq */
11348FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
11349{
11350 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11351 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
11352}
11353
11354
11355/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
11356FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
11357{
11358 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11359 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
11360}
11361
11362
11363/* Opcode 0xf3 0x0f 0xec - invalid */
11364/* Opcode 0xf2 0x0f 0xec - invalid */
11365
11366/** Opcode 0x0f 0xed - paddsw Pq, Qq */
11367FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
11368{
11369 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11370 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
11371}
11372
11373
11374/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
11375FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
11376{
11377 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11378 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
11379}
11380
11381
11382/* Opcode 0xf3 0x0f 0xed - invalid */
11383/* Opcode 0xf2 0x0f 0xed - invalid */
11384
11385
11386/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
11387FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
11388{
11389 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11390 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
11391}
11392
11393
11394/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
11395FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
11396{
11397 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11398 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
11399}
11400
11401
11402/* Opcode 0xf3 0x0f 0xee - invalid */
11403/* Opcode 0xf2 0x0f 0xee - invalid */
11404
11405
11406/** Opcode 0x0f 0xef - pxor Pq, Qq */
11407FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
11408{
11409 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11410 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
11411}
11412
11413
11414/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
11415FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
11416{
11417 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11418 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
11419}
11420
11421
11422/* Opcode 0xf3 0x0f 0xef - invalid */
11423/* Opcode 0xf2 0x0f 0xef - invalid */
11424
11425/* Opcode 0x0f 0xf0 - invalid */
11426/* Opcode 0x66 0x0f 0xf0 - invalid */
11427
11428
11429/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
11430FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
11431{
11432 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
11433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11434 if (IEM_IS_MODRM_REG_MODE(bRm))
11435 {
11436 /*
11437 * Register, register - (not implemented, assuming it raises \#UD).
11438 */
11439 return IEMOP_RAISE_INVALID_OPCODE();
11440 }
11441 else
11442 {
11443 /*
11444 * Register, memory.
11445 */
11446 IEM_MC_BEGIN(0, 2);
11447 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
11448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11449
11450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11452 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
11453 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
11454 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11455 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
11456
11457 IEM_MC_ADVANCE_RIP();
11458 IEM_MC_END();
11459 }
11460 return VINF_SUCCESS;
11461}
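/*
 * lddqu is defined as an alignment agnostic 128-bit load, so the code
 * above uses the unaligned IEM_MC_FETCH_MEM_U128 rather than the
 * _ALIGN_SSE variant, and the register form is treated as undefined.
 */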
11462
11463
11464/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
11465FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
11466{
11467 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
11468 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
11469}
11470
11471
11472/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
11473FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
11474{
11475 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11476 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
11477}
11478
11479
11480/* Opcode 0xf2 0x0f 0xf1 - invalid */
11481
11482/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
11483FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
11484{
11485 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
11486 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
11487}
11488
11489
11490/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
11491FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
11492{
11493 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11494 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
11495}
11496
11497
11498/* Opcode 0xf2 0x0f 0xf2 - invalid */
11499
11500/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
11501FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
11502{
11503 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
11504 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
11505}
11506
11507
11508/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
11509FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
11510{
11511 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11512 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
11513}
11514
11515/* Opcode 0xf2 0x0f 0xf3 - invalid */
11516
11517/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
11518FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
11519{
11520 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11521 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
11522}
11523
11524
11525/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
11526FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
11527{
11528 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11529 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
11530}
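/*
 * pmuludq above multiplies the low 32-bit halves of each 64-bit group as
 * unsigned values into a full 64-bit product. Group sketch:
 *
 *     static uint64_t RefPMulUDq(uint64_t a, uint64_t b)
 *     {
 *         return (uint64_t)(uint32_t)a * (uint32_t)b;
 *     }
 */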
11531
11532
11533/* Opcode 0xf2 0x0f 0xf4 - invalid */
11534
11535/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
11536FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
11537{
11538 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
11539 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
11540}
11541
11542
11543/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
11544FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
11545{
11546 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11547 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
11548}
11549
11550/* Opcode 0xf2 0x0f 0xf5 - invalid */
11551
11552/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
11553FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
11554{
11555 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
11556 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
11557}
11558
11559
11560/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
11561FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
11562{
11563 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
11564 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
11565}
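/*
 * psadbw above sums the absolute byte differences of the operands into a
 * 16-bit result per 8-byte group, zeroing the remaining bits. Sketch for
 * one group:
 *
 *     static uint64_t RefPSadBw(uint8_t const *pabA, uint8_t const *pabB)
 *     {
 *         uint32_t uSum = 0;
 *         for (unsigned i = 0; i < 8; i++)
 *             uSum += pabA[i] >= pabB[i] ? pabA[i] - pabB[i] : pabB[i] - pabA[i];
 *         return uSum; // the sum fits the low word; upper bits stay zero
 *     }
 */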
11566
11567
11568/* Opcode 0xf2 0x0f 0xf6 - invalid */
11569
11570/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
11571FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
11572/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
11573FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
11574/* Opcode 0xf2 0x0f 0xf7 - invalid */
11575
11576
11577/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
11578FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
11579{
11580 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11581 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
11582}
11583
11584
11585/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
11586FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
11587{
11588 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11589 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
11590}
11591
11592
11593/* Opcode 0xf2 0x0f 0xf8 - invalid */
11594
11595
11596/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
11597FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
11598{
11599 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11600 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
11601}
11602
11603
11604/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
11605FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
11606{
11607 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11608 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
11609}
11610
11611
11612/* Opcode 0xf2 0x0f 0xf9 - invalid */
11613
11614
11615/** Opcode 0x0f 0xfa - psubd Pq, Qq */
11616FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
11617{
11618 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11619 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
11620}
11621
11622
11623/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
11624FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
11625{
11626 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11627 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
11628}
11629
11630
11631/* Opcode 0xf2 0x0f 0xfa - invalid */
11632
11633
11634/** Opcode 0x0f 0xfb - psubq Pq, Qq */
11635FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
11636{
11637 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11638 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
11639}
11640
11641
11642/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
11643FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
11644{
11645 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11646 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
11647}
11648
11649
11650/* Opcode 0xf2 0x0f 0xfb - invalid */
11651
11652
11653/** Opcode 0x0f 0xfc - paddb Pq, Qq */
11654FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
11655{
11656 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11657 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
11658}
11659
11660
11661/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
11662FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
11663{
11664 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11665 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
11666}
11667
11668
11669/* Opcode 0xf2 0x0f 0xfc - invalid */
11670
11671
11672/** Opcode 0x0f 0xfd - paddw Pq, Qq */
11673FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
11674{
11675 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11676 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
11677}
11678
11679
11680/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
11681FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
11682{
11683 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11684 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
11685}
11686
11687
11688/* Opcode 0xf2 0x0f 0xfd - invalid */
11689
11690
11691/** Opcode 0x0f 0xfe - paddd Pq, Qq */
11692FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
11693{
11694 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11695 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
11696}
11697
11698
11699/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
11700FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
11701{
11702 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11703 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
11704}
11705
11706
11707/* Opcode 0xf2 0x0f 0xfe - invalid */
11708
11709
11710/** Opcode 0x0f 0xff - UD0 */
11711FNIEMOP_DEF(iemOp_ud0)
11712{
11713 IEMOP_MNEMONIC(ud0, "ud0");
11714 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
11715 {
11716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
11717#ifndef TST_IEM_CHECK_MC
11718 if (IEM_IS_MODRM_MEM_MODE(bRm))
11719 {
11720 RTGCPTR GCPtrEff;
11721 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
11722 if (rcStrict != VINF_SUCCESS)
11723 return rcStrict;
11724 }
11725#endif
11726 IEMOP_HLP_DONE_DECODING();
11727 }
11728 return IEMOP_RAISE_INVALID_OPCODE();
11729}
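/*
 * ud0 decoding differs by vendor: Intel CPUs fetch a ModR/M byte (and thus
 * decode the effective address) before raising #UD, while AMD CPUs raise it
 * without consuming ModR/M, hence the enmCpuVendor check above.
 */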
11730
11731
11732
11733/**
11734 * Two byte opcode map, first byte 0x0f.
11735 *
11736 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
11737 * check if it needs updating as well when making changes.
11738 */
11739IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
11740{
11741 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
11742 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
11743 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
11744 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
11745 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
11746 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
11747 /* 0x05 */ IEMOP_X4(iemOp_syscall),
11748 /* 0x06 */ IEMOP_X4(iemOp_clts),
11749 /* 0x07 */ IEMOP_X4(iemOp_sysret),
11750 /* 0x08 */ IEMOP_X4(iemOp_invd),
11751 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
11752 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
11753 /* 0x0b */ IEMOP_X4(iemOp_ud2),
11754 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
11755 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
11756 /* 0x0e */ IEMOP_X4(iemOp_femms),
11757 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
11758
11759 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
11760 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
11761 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
11762 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11763 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11764 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11765 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
11766 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11767 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
11768 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
11769 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
11770 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
11771 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
11772 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
11773 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
11774 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
11775
11776 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
11777 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
11778 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
11779 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
11780 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
11781 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
11782 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
11783 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
11784 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11785 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11786 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
11787 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11788 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
11789 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
11790 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11791 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11792
11793 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
11794 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
11795 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
11796 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
11797 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
11798 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
11799 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
11800 /* 0x37 */ IEMOP_X4(iemOp_getsec),
11801 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
11802 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
11803 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
11804 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
11805 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
11806 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
11807 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
11808 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
11809
11810 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
11811 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
11812 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
11813 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
11814 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
11815 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
11816 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
11817 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
11818 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
11819 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
11820 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
11821 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
11822 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
11823 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
11824 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
11825 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
11826
11827 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11828 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
11829 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
11830 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
11831 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11832 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11833 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11834 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11835 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
11836 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
11837 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
11838 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
11839 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
11840 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
11841 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
11842 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
11843
11844 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11845 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11846 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11847 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11848 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11849 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11850 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11851 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11852 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11853 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11854 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11855 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11856 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11857 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11858 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11859 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
11860
11861 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
11862 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
11863 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
11864 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
11865 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11866 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11867 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11868 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11869
11870 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11871 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11872 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11873 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11874 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
11875 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
11876 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
11877 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
11878
11879 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
11880 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
11881 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
11882 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
11883 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
11884 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
11885 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
11886 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
11887 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
11888 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
11889 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
11890 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
11891 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
11892 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
11893 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
11894 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
11895
11896 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
11897 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
11898 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
11899 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
11900 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
11901 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
11902 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
11903 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
11904 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
11905 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
11906 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
11907 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
11908 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
11909 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
11910 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
11911 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
11912
11913 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
11914 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
11915 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
11916 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
11917 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
11918 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
11919 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
11920 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
11921 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
11922 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
11923 /* 0xaa */ IEMOP_X4(iemOp_rsm),
11924 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
11925 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
11926 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
11927 /* 0xae */ IEMOP_X4(iemOp_Grp15),
11928 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
11929
11930 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
11931 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
11932 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
11933 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
11934 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
11935 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
11936 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
11937 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
11938 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
11939 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
11940 /* 0xba */ IEMOP_X4(iemOp_Grp8),
11941 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
11942 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
11943 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
11944 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
11945 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
11946
11947 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
11948 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
11949 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
11950 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11951 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
11952 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
11953 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
11954 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
11955 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
11956 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
11957 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
11958 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
11959 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
11960 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
11961 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
11962 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
11963
11964 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
11965 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11966 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11967 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11968 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11969 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11970 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
11971 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11972 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11973 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11974 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11975 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11976 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11977 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11978 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11979 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11980
11981 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11982 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11983 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11984 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11985 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11986 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11987 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
11988 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11989 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11990 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11991 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11992 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11993 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11994 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11995 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11996 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11997
11998 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
11999 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
12000 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
12001 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
12002 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
12003 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
12004 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
12005 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
12006 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
12007 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
12008 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
12009 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
12010 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
12011 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
12012 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
12013 /* 0xff */ IEMOP_X4(iemOp_ud0),
12014};
12015AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
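/*
 * The table is indexed with four entries per opcode byte, one per mandatory
 * prefix column (none, 066h, 0f3h, 0f2h), which is what the AssertCompile
 * above verifies: 256 * 4 == 1024. Lookup sketch, assuming an idxPrefix
 * variable holding the column selected by the last mandatory prefix:
 *
 *     PFNIEMOP pfn = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
 */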
12016
12017/** @} */
12018