source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h @ 96333

Last change on this file since 96333 was 96333, checked in by vboxsync, 2 years ago:
VMM/IEM: Implement divps/divpd instructions, bugref:9898

/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96333 2022-08-19 11:37:07Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
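
/*
 * Illustrative sketch (an assumption, not from the original file): individual
 * opcode handlers are assumed to dispatch to these common workers by passing
 * the matching assembly-level helper, along these lines:
 *
 *     FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
 *     }
 *
 * The handler and helper names above follow the naming conventions visible in
 * this file but are not verified against the full sources.
 */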


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
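
/*
 * Illustrative sketch (an assumption, not from the original file): the SSE2
 * dispatch looks the same as the MMX one, just with a 128-bit helper. Note
 * that the XMM workers need no IEM_MC_MODIFIED_MREG_BY_REF or
 * IEM_MC_FPU_TO_MMX_MODE calls, since XMM registers are not aliased onto the
 * x87/MMX state.
 *
 *     FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
 *     {
 *         IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
 *     }
 */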


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
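
/*
 * Note added for clarity: the IEM_MC_FETCH_MEM_U32_ZX_U64 above is what makes
 * the "LowLow" memory form a 32-bit access; the upper half of uSrc is zero
 * and ignored by the worker. A typical caller (an assumption based on this
 * file's conventions) would be an unpack-low instruction such as:
 *
 *     FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
 *     {
 *         IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
 *     }
 */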


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which for SSE means a
 * 128-bit aligned access that may read either the full 128 bits or only the
 * low 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which for SSE2 means a
 * 128-bit aligned access that may read either the full 128 bits or only the
 * low 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read either the full 128 bits or only the
 * upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
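
/*
 * Note added for clarity: unlike the integer workers above, the floating-point
 * workers receive the FXSAVE state so they can honour MXCSR (rounding mode and
 * exception masks) and report exception flags back through IEMSSERESULT; the
 * result is committed by IEM_MC_STORE_SSE_RESULT before
 * IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT decides whether #XF/#UD must be
 * raised. Given this changeset's summary ("Implement divps/divpd"), divps is
 * presumably dispatched through this worker roughly as follows (an assumption,
 * not verified against the full sources):
 *
 *     FNIEMOP_DEF(iemOp_divps_Vps_Wps)
 *     {
 *         IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
 *     }
 */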


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE2 means a
 * 128-bit aligned access that may read either the full 128 bits or only the
 * upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4 and /5, common worker for verr and verw. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
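
/*
 * Note added for clarity: group opcodes dispatch on the reg field (bits 5:3)
 * of the ModR/M byte, so IEM_GET_MODRM_REG_8(bRm) is assumed to reduce to
 * something like ((bRm >> 3) & 7), indexing the 8-entry table above:
 *
 *     ModR/M = mod(7:6) | reg(5:3) | rm(2:0)
 *     0x0f 0x00 /2  ->  g_apfnGroup6[2]  ->  iemOp_Grp6_lldt
 */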


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD is raised. (NEM/win
       makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of the instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD is raised. (NEM/win
       makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower four bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
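
/*
 * Note added for clarity: group 7 needs two dispatch levels. Memory forms are
 * routed through g_apfnGroup7Mem on the reg field alone, while the register
 * forms are fixed whole-byte encodings distinguished by both reg and rm, e.g.:
 *
 *     0x0f 0x01 0xc1  ->  mod=3, reg=0, rm=1  ->  vmcall
 *     0x0f 0x01 0xd0  ->  mod=3, reg=2, rm=0  ->  xgetbv
 *     0x0f 0x01 0xf8  ->  mod=3, reg=7, rm=0  ->  swapgs
 */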

/** Common worker for LAR and LSL (opcodes 0x0f 0x02 and 0x0f 0x03). */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
                /** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
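
/*
 * Note added for clarity: the 32-bit and 64-bit operand-size cases above share
 * the 64-bit destination path, leaving iemCImpl_LarLsl_u64 to produce the
 * correctly sized result for both modes; the source is always fetched as a
 * 16-bit selector, as the testcase todo above notes.
 */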
1686
1687
1688
1689/** Opcode 0x0f 0x02. */
1690FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1691{
1692 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1693 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1694}
1695
1696
1697/** Opcode 0x0f 0x03. */
1698FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1699{
1700 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1701 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1702}
1703
1704
1705/** Opcode 0x0f 0x05. */
1706FNIEMOP_DEF(iemOp_syscall)
1707{
1708 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1710 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1711}
1712
1713
1714/** Opcode 0x0f 0x06. */
1715FNIEMOP_DEF(iemOp_clts)
1716{
1717 IEMOP_MNEMONIC(clts, "clts");
1718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1719 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1720}
1721
1722
1723/** Opcode 0x0f 0x07. */
1724FNIEMOP_DEF(iemOp_sysret)
1725{
1726 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1728 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1729}
1730
1731
1732/** Opcode 0x0f 0x08. */
1733FNIEMOP_DEF(iemOp_invd)
1734{
1735 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1736 IEMOP_HLP_MIN_486();
1737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1738 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
1739}
1740
1741
1742/** Opcode 0x0f 0x09. */
1743FNIEMOP_DEF(iemOp_wbinvd)
1744{
1745 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1746 IEMOP_HLP_MIN_486();
1747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1748 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
1749}
1750
1751
1752/** Opcode 0x0f 0x0b. */
1753FNIEMOP_DEF(iemOp_ud2)
1754{
1755 IEMOP_MNEMONIC(ud2, "ud2");
1756 return IEMOP_RAISE_INVALID_OPCODE();
1757}
1758
1759/** Opcode 0x0f 0x0d. */
1760FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1761{
1762 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1763 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1764 {
1765 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1766 return IEMOP_RAISE_INVALID_OPCODE();
1767 }
1768
1769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1770 if (IEM_IS_MODRM_REG_MODE(bRm))
1771 {
1772 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1773 return IEMOP_RAISE_INVALID_OPCODE();
1774 }
1775
1776 switch (IEM_GET_MODRM_REG_8(bRm))
1777 {
1778 case 2: /* Aliased to /0 for the time being. */
1779 case 4: /* Aliased to /0 for the time being. */
1780 case 5: /* Aliased to /0 for the time being. */
1781 case 6: /* Aliased to /0 for the time being. */
1782 case 7: /* Aliased to /0 for the time being. */
1783 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1784 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1785 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1786 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1787 }
1788
1789 IEM_MC_BEGIN(0, 1);
1790 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1791 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1793 /* Currently a NOP. */
1794 NOREF(GCPtrEffSrc);
1795 IEM_MC_ADVANCE_RIP();
1796 IEM_MC_END();
1797 return VINF_SUCCESS;
1798}
1799
1800
1801/** Opcode 0x0f 0x0e. */
1802FNIEMOP_DEF(iemOp_femms)
1803{
1804 IEMOP_MNEMONIC(femms, "femms");
1805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1806
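    /* FEMMS is AMD's fast EMMS: like EMMS it tags the x87 stack empty so
       FPU code can follow MMX code, but it leaves the register contents
       undefined, which is what makes it cheaper on real hardware. */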
1807 IEM_MC_BEGIN(0, 0);
1808 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
1809 IEM_MC_MAYBE_RAISE_FPU_XCPT();
1810 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
1811 IEM_MC_FPU_FROM_MMX_MODE();
1812 IEM_MC_ADVANCE_RIP();
1813 IEM_MC_END();
1814 return VINF_SUCCESS;
1815}
1816
1817
1818/** Opcode 0x0f 0x0f. */
1819FNIEMOP_DEF(iemOp_3Dnow)
1820{
1821 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1822 {
1823 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1824 return IEMOP_RAISE_INVALID_OPCODE();
1825 }
1826
1827#ifdef IEM_WITH_3DNOW
1828 /* This is pretty sparse, use switch instead of table. */
1829 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1830 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
1831#else
1832 IEMOP_BITCH_ABOUT_STUB();
1833 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1834#endif
1835}
1836
1837
1838/**
1839 * @opcode 0x10
1840 * @oppfx none
1841 * @opcpuid sse
1842 * @opgroup og_sse_simdfp_datamove
1843 * @opxcpttype 4UA
1844 * @optest op1=1 op2=2 -> op1=2
1845 * @optest op1=0 op2=-22 -> op1=-22
1846 */
1847FNIEMOP_DEF(iemOp_movups_Vps_Wps)
1848{
1849 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1851 if (IEM_IS_MODRM_REG_MODE(bRm))
1852 {
1853 /*
1854 * Register, register.
1855 */
1856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1857 IEM_MC_BEGIN(0, 0);
1858 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1859 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1860 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
1861 IEM_GET_MODRM_RM(pVCpu, bRm));
1862 IEM_MC_ADVANCE_RIP();
1863 IEM_MC_END();
1864 }
1865 else
1866 {
1867 /*
1868 * Register, memory.
1869 */
1870 IEM_MC_BEGIN(0, 2);
1871 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1873
1874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1876 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1877 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1878
1879 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1880 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1881
1882 IEM_MC_ADVANCE_RIP();
1883 IEM_MC_END();
1884 }
1885 return VINF_SUCCESS;
1887}
1888
1889
1890/**
1891 * @opcode 0x10
1892 * @oppfx 0x66
1893 * @opcpuid sse2
1894 * @opgroup og_sse2_pcksclr_datamove
1895 * @opxcpttype 4UA
1896 * @optest op1=1 op2=2 -> op1=2
1897 * @optest op1=0 op2=-42 -> op1=-42
1898 */
1899FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
1900{
1901 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1902 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1903 if (IEM_IS_MODRM_REG_MODE(bRm))
1904 {
1905 /*
1906 * Register, register.
1907 */
1908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1909 IEM_MC_BEGIN(0, 0);
1910 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1911 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1912 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
1913 IEM_GET_MODRM_RM(pVCpu, bRm));
1914 IEM_MC_ADVANCE_RIP();
1915 IEM_MC_END();
1916 }
1917 else
1918 {
1919 /*
1920 * Register, memory.
1921 */
1922 IEM_MC_BEGIN(0, 2);
1923 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
1924 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1925
1926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1928 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1929 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1930
1931 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1932 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1933
1934 IEM_MC_ADVANCE_RIP();
1935 IEM_MC_END();
1936 }
1937 return VINF_SUCCESS;
1938}
1939
1940
1941/**
1942 * @opcode 0x10
1943 * @oppfx 0xf3
1944 * @opcpuid sse
1945 * @opgroup og_sse_simdfp_datamove
1946 * @opxcpttype 5
1947 * @optest op1=1 op2=2 -> op1=2
1948 * @optest op1=0 op2=-22 -> op1=-22
1949 */
1950FNIEMOP_DEF(iemOp_movss_Vss_Wss)
1951{
1952 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1953 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1954 if (IEM_IS_MODRM_REG_MODE(bRm))
1955 {
1956 /*
1957 * Register, register.
1958 */
1959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1960 IEM_MC_BEGIN(0, 1);
1961 IEM_MC_LOCAL(uint32_t, uSrc);
1962
1963 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1964 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
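    /* Register form: only the low dword is copied; bits 127:32 of the
       destination are preserved. */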
1965 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1966 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1967
1968 IEM_MC_ADVANCE_RIP();
1969 IEM_MC_END();
1970 }
1971 else
1972 {
1973 /*
1974 * Register, memory.
1975 */
1976 IEM_MC_BEGIN(0, 2);
1977 IEM_MC_LOCAL(uint32_t, uSrc);
1978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1979
1980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1982 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1983 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1984
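    /* Memory form: the loaded dword is zero-extended, clearing bits 127:32
       of the destination. */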
1985 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1986 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1987
1988 IEM_MC_ADVANCE_RIP();
1989 IEM_MC_END();
1990 }
1991 return VINF_SUCCESS;
1992}
1993
1994
1995/**
1996 * @opcode 0x10
1997 * @oppfx 0xf2
1998 * @opcpuid sse2
1999 * @opgroup og_sse2_pcksclr_datamove
2000 * @opxcpttype 5
2001 * @optest op1=1 op2=2 -> op1=2
2002 * @optest op1=0 op2=-42 -> op1=-42
2003 */
2004FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2005{
2006 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2008 if (IEM_IS_MODRM_REG_MODE(bRm))
2009 {
2010 /*
2011 * Register, register.
2012 */
2013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2014 IEM_MC_BEGIN(0, 1);
2015 IEM_MC_LOCAL(uint64_t, uSrc);
2016
2017 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2018 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
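    /* Register form: only the low qword is copied; bits 127:64 of the
       destination are preserved. */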
2019 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2020 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2021
2022 IEM_MC_ADVANCE_RIP();
2023 IEM_MC_END();
2024 }
2025 else
2026 {
2027 /*
2028 * Register, memory.
2029 */
2030 IEM_MC_BEGIN(0, 2);
2031 IEM_MC_LOCAL(uint64_t, uSrc);
2032 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2033
2034 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2036 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2037 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2038
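    /* Memory form: the loaded qword is zero-extended, clearing bits 127:64
       of the destination. */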
2039 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2040 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2041
2042 IEM_MC_ADVANCE_RIP();
2043 IEM_MC_END();
2044 }
2045 return VINF_SUCCESS;
2046}
2047
2048
2049/**
2050 * @opcode 0x11
2051 * @oppfx none
2052 * @opcpuid sse
2053 * @opgroup og_sse_simdfp_datamove
2054 * @opxcpttype 4UA
2055 * @optest op1=1 op2=2 -> op1=2
2056 * @optest op1=0 op2=-42 -> op1=-42
2057 */
2058FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2059{
2060 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2062 if (IEM_IS_MODRM_REG_MODE(bRm))
2063 {
2064 /*
2065 * Register, register.
2066 */
2067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2068 IEM_MC_BEGIN(0, 0);
2069 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2070 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2071 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2072 IEM_GET_MODRM_REG(pVCpu, bRm));
2073 IEM_MC_ADVANCE_RIP();
2074 IEM_MC_END();
2075 }
2076 else
2077 {
2078 /*
2079 * Memory, register.
2080 */
2081 IEM_MC_BEGIN(0, 2);
2082 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2084
2085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2087 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2088 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2089
2090 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2091 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2092
2093 IEM_MC_ADVANCE_RIP();
2094 IEM_MC_END();
2095 }
2096 return VINF_SUCCESS;
2097}
2098
2099
2100/**
2101 * @opcode 0x11
2102 * @oppfx 0x66
2103 * @opcpuid sse2
2104 * @opgroup og_sse2_pcksclr_datamove
2105 * @opxcpttype 4UA
2106 * @optest op1=1 op2=2 -> op1=2
2107 * @optest op1=0 op2=-42 -> op1=-42
2108 */
2109FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2110{
2111 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2113 if (IEM_IS_MODRM_REG_MODE(bRm))
2114 {
2115 /*
2116 * Register, register.
2117 */
2118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2119 IEM_MC_BEGIN(0, 0);
2120 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2121 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2122 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2123 IEM_GET_MODRM_REG(pVCpu, bRm));
2124 IEM_MC_ADVANCE_RIP();
2125 IEM_MC_END();
2126 }
2127 else
2128 {
2129 /*
2130 * Memory, register.
2131 */
2132 IEM_MC_BEGIN(0, 2);
2133 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2135
2136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2138 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2139 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2140
2141 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2142 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2143
2144 IEM_MC_ADVANCE_RIP();
2145 IEM_MC_END();
2146 }
2147 return VINF_SUCCESS;
2148}
2149
2150
2151/**
2152 * @opcode 0x11
2153 * @oppfx 0xf3
2154 * @opcpuid sse
2155 * @opgroup og_sse_simdfp_datamove
2156 * @opxcpttype 5
2157 * @optest op1=1 op2=2 -> op1=2
2158 * @optest op1=0 op2=-22 -> op1=-22
2159 */
2160FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2161{
2162 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2164 if (IEM_IS_MODRM_REG_MODE(bRm))
2165 {
2166 /*
2167 * Register, register.
2168 */
2169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2170 IEM_MC_BEGIN(0, 1);
2171 IEM_MC_LOCAL(uint32_t, uSrc);
2172
2173 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2174 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2175 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2176 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2177
2178 IEM_MC_ADVANCE_RIP();
2179 IEM_MC_END();
2180 }
2181 else
2182 {
2183 /*
2184 * Memory, register.
2185 */
2186 IEM_MC_BEGIN(0, 2);
2187 IEM_MC_LOCAL(uint32_t, uSrc);
2188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2189
2190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2192 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2193 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2194
2195 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2196 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2197
2198 IEM_MC_ADVANCE_RIP();
2199 IEM_MC_END();
2200 }
2201 return VINF_SUCCESS;
2202}
2203
2204
2205/**
2206 * @opcode 0x11
2207 * @oppfx 0xf2
2208 * @opcpuid sse2
2209 * @opgroup og_sse2_pcksclr_datamove
2210 * @opxcpttype 5
2211 * @optest op1=1 op2=2 -> op1=2
2212 * @optest op1=0 op2=-42 -> op1=-42
2213 */
2214FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2215{
2216 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2217 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2218 if (IEM_IS_MODRM_REG_MODE(bRm))
2219 {
2220 /*
2221 * Register, register.
2222 */
2223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2224 IEM_MC_BEGIN(0, 1);
2225 IEM_MC_LOCAL(uint64_t, uSrc);
2226
2227 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2228 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2229 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2230 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2231
2232 IEM_MC_ADVANCE_RIP();
2233 IEM_MC_END();
2234 }
2235 else
2236 {
2237 /*
2238 * Memory, register.
2239 */
2240 IEM_MC_BEGIN(0, 2);
2241 IEM_MC_LOCAL(uint64_t, uSrc);
2242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2243
2244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2246 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2247 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2248
2249 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2250 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2251
2252 IEM_MC_ADVANCE_RIP();
2253 IEM_MC_END();
2254 }
2255 return VINF_SUCCESS;
2256}
2257
2258
2259FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2260{
2261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2262 if (IEM_IS_MODRM_REG_MODE(bRm))
2263 {
2264 /**
2265 * @opcode 0x12
2266 * @opcodesub 11 mr/reg
2267 * @oppfx none
2268 * @opcpuid sse
2269 * @opgroup og_sse_simdfp_datamove
2270 * @opxcpttype 5
2271 * @optest op1=1 op2=2 -> op1=2
2272 * @optest op1=0 op2=-42 -> op1=-42
2273 */
2274 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2275
2276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2277 IEM_MC_BEGIN(0, 1);
2278 IEM_MC_LOCAL(uint64_t, uSrc);
2279
2280 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2281 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
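    /* MOVHLPS: the high qword of the source goes to the low qword of the
       destination; the destination's own high qword is preserved. */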
2282 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2283 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2284
2285 IEM_MC_ADVANCE_RIP();
2286 IEM_MC_END();
2287 }
2288 else
2289 {
2290 /**
2291 * @opdone
2292 * @opcode 0x12
2293 * @opcodesub !11 mr/reg
2294 * @oppfx none
2295 * @opcpuid sse
2296 * @opgroup og_sse_simdfp_datamove
2297 * @opxcpttype 5
2298 * @optest op1=1 op2=2 -> op1=2
2299 * @optest op1=0 op2=-42 -> op1=-42
2300 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2301 */
2302 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2303
2304 IEM_MC_BEGIN(0, 2);
2305 IEM_MC_LOCAL(uint64_t, uSrc);
2306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2307
2308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2310 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2312
2313 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2314 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2315
2316 IEM_MC_ADVANCE_RIP();
2317 IEM_MC_END();
2318 }
2319 return VINF_SUCCESS;
2320}
2321
2322
2323/**
2324 * @opcode 0x12
2325 * @opcodesub !11 mr/reg
2326 * @oppfx 0x66
2327 * @opcpuid sse2
2328 * @opgroup og_sse2_pcksclr_datamove
2329 * @opxcpttype 5
2330 * @optest op1=1 op2=2 -> op1=2
2331 * @optest op1=0 op2=-42 -> op1=-42
2332 */
2333FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2334{
2335 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2336 if (IEM_IS_MODRM_MEM_MODE(bRm))
2337 {
2338 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2339
2340 IEM_MC_BEGIN(0, 2);
2341 IEM_MC_LOCAL(uint64_t, uSrc);
2342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2343
2344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2346 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2347 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2348
2349 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2350 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2351
2352 IEM_MC_ADVANCE_RIP();
2353 IEM_MC_END();
2354 return VINF_SUCCESS;
2355 }
2356
2357 /**
2358 * @opdone
2359 * @opmnemonic ud660f12m3
2360 * @opcode 0x12
2361 * @opcodesub 11 mr/reg
2362 * @oppfx 0x66
2363 * @opunused immediate
2364 * @opcpuid sse
2365 * @optest ->
2366 */
2367 return IEMOP_RAISE_INVALID_OPCODE();
2368}
2369
2370
2371/**
2372 * @opcode 0x12
2373 * @oppfx 0xf3
2374 * @opcpuid sse3
2375 * @opgroup og_sse3_pcksclr_datamove
2376 * @opxcpttype 4
2377 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2378 * op1=0x00000002000000020000000100000001
2379 */
2380FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2381{
2382 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
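    /* Duplicates the even dwords: dst = { src0, src0, src2, src2 } (lo to hi). */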
2383 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2384 if (IEM_IS_MODRM_REG_MODE(bRm))
2385 {
2386 /*
2387 * Register, register.
2388 */
2389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2390 IEM_MC_BEGIN(2, 0);
2391 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2392 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2393
2394 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2395 IEM_MC_PREPARE_SSE_USAGE();
2396
2397 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2398 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2399 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2400
2401 IEM_MC_ADVANCE_RIP();
2402 IEM_MC_END();
2403 }
2404 else
2405 {
2406 /*
2407 * Register, memory.
2408 */
2409 IEM_MC_BEGIN(2, 2);
2410 IEM_MC_LOCAL(RTUINT128U, uSrc);
2411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2412 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2413 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2414
2415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2417 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2418 IEM_MC_PREPARE_SSE_USAGE();
2419
2420 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2421 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2422 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2423
2424 IEM_MC_ADVANCE_RIP();
2425 IEM_MC_END();
2426 }
2427 return VINF_SUCCESS;
2428}
2429
2430
2431/**
2432 * @opcode 0x12
2433 * @oppfx 0xf2
2434 * @opcpuid sse3
2435 * @opgroup og_sse3_pcksclr_datamove
2436 * @opxcpttype 5
2437 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2438 * op1=0x22222222111111112222222211111111
2439 */
2440FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2441{
2442 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
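    /* Duplicates the low qword of the source into both halves of the destination. */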
2443 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2444 if (IEM_IS_MODRM_REG_MODE(bRm))
2445 {
2446 /*
2447 * Register, register.
2448 */
2449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2450 IEM_MC_BEGIN(2, 0);
2451 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2452 IEM_MC_ARG(uint64_t, uSrc, 1);
2453
2454 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2455 IEM_MC_PREPARE_SSE_USAGE();
2456
2457 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2458 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2459 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2460
2461 IEM_MC_ADVANCE_RIP();
2462 IEM_MC_END();
2463 }
2464 else
2465 {
2466 /*
2467 * Register, memory.
2468 */
2469 IEM_MC_BEGIN(2, 2);
2470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2471 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2472 IEM_MC_ARG(uint64_t, uSrc, 1);
2473
2474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2476 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2477 IEM_MC_PREPARE_SSE_USAGE();
2478
2479 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2480 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2481 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2482
2483 IEM_MC_ADVANCE_RIP();
2484 IEM_MC_END();
2485 }
2486 return VINF_SUCCESS;
2487}
2488
2489
2490/**
2491 * @opcode 0x13
2492 * @opcodesub !11 mr/reg
2493 * @oppfx none
2494 * @opcpuid sse
2495 * @opgroup og_sse_simdfp_datamove
2496 * @opxcpttype 5
2497 * @optest op1=1 op2=2 -> op1=2
2498 * @optest op1=0 op2=-42 -> op1=-42
2499 */
2500FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2501{
2502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2503 if (IEM_IS_MODRM_MEM_MODE(bRm))
2504 {
2505 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2506
2507 IEM_MC_BEGIN(0, 2);
2508 IEM_MC_LOCAL(uint64_t, uSrc);
2509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2510
2511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2513 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2514 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2515
2516 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2517 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2518
2519 IEM_MC_ADVANCE_RIP();
2520 IEM_MC_END();
2521 return VINF_SUCCESS;
2522 }
2523
2524 /**
2525 * @opdone
2526 * @opmnemonic ud0f13m3
2527 * @opcode 0x13
2528 * @opcodesub 11 mr/reg
2529 * @oppfx none
2530 * @opunused immediate
2531 * @opcpuid sse
2532 * @optest ->
2533 */
2534 return IEMOP_RAISE_INVALID_OPCODE();
2535}
2536
2537
2538/**
2539 * @opcode 0x13
2540 * @opcodesub !11 mr/reg
2541 * @oppfx 0x66
2542 * @opcpuid sse2
2543 * @opgroup og_sse2_pcksclr_datamove
2544 * @opxcpttype 5
2545 * @optest op1=1 op2=2 -> op1=2
2546 * @optest op1=0 op2=-42 -> op1=-42
2547 */
2548FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2549{
2550 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2551 if (IEM_IS_MODRM_MEM_MODE(bRm))
2552 {
2553 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2554 IEM_MC_BEGIN(0, 2);
2555 IEM_MC_LOCAL(uint64_t, uSrc);
2556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2557
2558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2560 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2561 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2562
2563 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2564 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2565
2566 IEM_MC_ADVANCE_RIP();
2567 IEM_MC_END();
2568 return VINF_SUCCESS;
2569 }
2570
2571 /**
2572 * @opdone
2573 * @opmnemonic ud660f13m3
2574 * @opcode 0x13
2575 * @opcodesub 11 mr/reg
2576 * @oppfx 0x66
2577 * @opunused immediate
2578 * @opcpuid sse
2579 * @optest ->
2580 */
2581 return IEMOP_RAISE_INVALID_OPCODE();
2582}
2583
2584
2585/**
2586 * @opmnemonic udf30f13
2587 * @opcode 0x13
2588 * @oppfx 0xf3
2589 * @opunused intel-modrm
2590 * @opcpuid sse
2591 * @optest ->
2592 * @opdone
2593 */
2594
2595/**
2596 * @opmnemonic udf20f13
2597 * @opcode 0x13
2598 * @oppfx 0xf2
2599 * @opunused intel-modrm
2600 * @opcpuid sse
2601 * @optest ->
2602 * @opdone
2603 */
2604
2605/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2606FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2607{
2608 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
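    /* Interleaves the low dwords of both operands: dst = { a0, b0, a1, b1 } (lo to hi). */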
2609 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2610}
2611
2612
2613/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2614FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2615{
2616 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2617 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2618}
2619
2620
2621/**
2622 * @opdone
2623 * @opmnemonic udf30f14
2624 * @opcode 0x14
2625 * @oppfx 0xf3
2626 * @opunused intel-modrm
2627 * @opcpuid sse
2628 * @optest ->
2629 * @opdone
2630 */
2631
2632/**
2633 * @opmnemonic udf20f14
2634 * @opcode 0x14
2635 * @oppfx 0xf2
2636 * @opunused intel-modrm
2637 * @opcpuid sse
2638 * @optest ->
2639 * @opdone
2640 */
2641
2642/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2643FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2644{
2645 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
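    /* Interleaves the high dwords of both operands: dst = { a2, b2, a3, b3 } (lo to hi). */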
2646 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2647}
2648
2649
2650/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2651FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2652{
2653 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2654 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2655}
2656
2657
2658/* Opcode 0xf3 0x0f 0x15 - invalid */
2659/* Opcode 0xf2 0x0f 0x15 - invalid */
2660
2661/**
2662 * @opdone
2663 * @opmnemonic udf30f15
2664 * @opcode 0x15
2665 * @oppfx 0xf3
2666 * @opunused intel-modrm
2667 * @opcpuid sse
2668 * @optest ->
2669 * @opdone
2670 */
2671
2672/**
2673 * @opmnemonic udf20f15
2674 * @opcode 0x15
2675 * @oppfx 0xf2
2676 * @opunused intel-modrm
2677 * @opcpuid sse
2678 * @optest ->
2679 * @opdone
2680 */
2681
2682FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2683{
2684 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2685 if (IEM_IS_MODRM_REG_MODE(bRm))
2686 {
2687 /**
2688 * @opcode 0x16
2689 * @opcodesub 11 mr/reg
2690 * @oppfx none
2691 * @opcpuid sse
2692 * @opgroup og_sse_simdfp_datamove
2693 * @opxcpttype 5
2694 * @optest op1=1 op2=2 -> op1=2
2695 * @optest op1=0 op2=-42 -> op1=-42
2696 */
2697 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2698
2699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2700 IEM_MC_BEGIN(0, 1);
2701 IEM_MC_LOCAL(uint64_t, uSrc);
2702
2703 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2704 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2705 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2706 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2707
2708 IEM_MC_ADVANCE_RIP();
2709 IEM_MC_END();
2710 }
2711 else
2712 {
2713 /**
2714 * @opdone
2715 * @opcode 0x16
2716 * @opcodesub !11 mr/reg
2717 * @oppfx none
2718 * @opcpuid sse
2719 * @opgroup og_sse_simdfp_datamove
2720 * @opxcpttype 5
2721 * @optest op1=1 op2=2 -> op1=2
2722 * @optest op1=0 op2=-42 -> op1=-42
2723 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2724 */
2725 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2726
2727 IEM_MC_BEGIN(0, 2);
2728 IEM_MC_LOCAL(uint64_t, uSrc);
2729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2730
2731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2733 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2734 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2735
2736 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2737 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2738
2739 IEM_MC_ADVANCE_RIP();
2740 IEM_MC_END();
2741 }
2742 return VINF_SUCCESS;
2743}
2744
2745
2746/**
2747 * @opcode 0x16
2748 * @opcodesub !11 mr/reg
2749 * @oppfx 0x66
2750 * @opcpuid sse2
2751 * @opgroup og_sse2_pcksclr_datamove
2752 * @opxcpttype 5
2753 * @optest op1=1 op2=2 -> op1=2
2754 * @optest op1=0 op2=-42 -> op1=-42
2755 */
2756FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2757{
2758 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2759 if (IEM_IS_MODRM_MEM_MODE(bRm))
2760 {
2761 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2762 IEM_MC_BEGIN(0, 2);
2763 IEM_MC_LOCAL(uint64_t, uSrc);
2764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2765
2766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2768 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2769 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2770
2771 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2772 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2773
2774 IEM_MC_ADVANCE_RIP();
2775 IEM_MC_END();
2776 return VINF_SUCCESS;
2777 }
2778
2779 /**
2780 * @opdone
2781 * @opmnemonic ud660f16m3
2782 * @opcode 0x16
2783 * @opcodesub 11 mr/reg
2784 * @oppfx 0x66
2785 * @opunused immediate
2786 * @opcpuid sse
2787 * @optest ->
2788 */
2789 return IEMOP_RAISE_INVALID_OPCODE();
2790}
2791
2792
2793/**
2794 * @opcode 0x16
2795 * @oppfx 0xf3
2796 * @opcpuid sse3
2797 * @opgroup og_sse3_pcksclr_datamove
2798 * @opxcpttype 4
2799 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2800 * op1=0x00000002000000020000000100000001
2801 */
2802FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
2803{
2804 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
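    /* Duplicates the odd dwords: dst = { src1, src1, src3, src3 } (lo to hi). */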
2805 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2806 if (IEM_IS_MODRM_REG_MODE(bRm))
2807 {
2808 /*
2809 * Register, register.
2810 */
2811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2812 IEM_MC_BEGIN(2, 0);
2813 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2814 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2815
2816 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2817 IEM_MC_PREPARE_SSE_USAGE();
2818
2819 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2820 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2821 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2822
2823 IEM_MC_ADVANCE_RIP();
2824 IEM_MC_END();
2825 }
2826 else
2827 {
2828 /*
2829 * Register, memory.
2830 */
2831 IEM_MC_BEGIN(2, 2);
2832 IEM_MC_LOCAL(RTUINT128U, uSrc);
2833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2834 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2835 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2836
2837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2839 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2840 IEM_MC_PREPARE_SSE_USAGE();
2841
2842 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2843 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2844 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2845
2846 IEM_MC_ADVANCE_RIP();
2847 IEM_MC_END();
2848 }
2849 return VINF_SUCCESS;
2850}
2851
2852/**
2853 * @opdone
2854 * @opmnemonic udf20f16
2855 * @opcode 0x16
2856 * @oppfx 0xf2
2857 * @opunused intel-modrm
2858 * @opcpuid sse
2859 * @optest ->
2860 * @opdone
2861 */
2862
2863
2864/**
2865 * @opcode 0x17
2866 * @opcodesub !11 mr/reg
2867 * @oppfx none
2868 * @opcpuid sse
2869 * @opgroup og_sse_simdfp_datamove
2870 * @opxcpttype 5
2871 * @optest op1=1 op2=2 -> op1=2
2872 * @optest op1=0 op2=-42 -> op1=-42
2873 */
2874FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2875{
2876 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2877 if (IEM_IS_MODRM_MEM_MODE(bRm))
2878 {
2879 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2880
2881 IEM_MC_BEGIN(0, 2);
2882 IEM_MC_LOCAL(uint64_t, uSrc);
2883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2884
2885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2887 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2888 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2889
2890 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2891 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2892
2893 IEM_MC_ADVANCE_RIP();
2894 IEM_MC_END();
2895 return VINF_SUCCESS;
2896 }
2897
2898 /**
2899 * @opdone
2900 * @opmnemonic ud0f17m3
2901 * @opcode 0x17
2902 * @opcodesub 11 mr/reg
2903 * @oppfx none
2904 * @opunused immediate
2905 * @opcpuid sse
2906 * @optest ->
2907 */
2908 return IEMOP_RAISE_INVALID_OPCODE();
2909}
2910
2911
2912/**
2913 * @opcode 0x17
2914 * @opcodesub !11 mr/reg
2915 * @oppfx 0x66
2916 * @opcpuid sse2
2917 * @opgroup og_sse2_pcksclr_datamove
2918 * @opxcpttype 5
2919 * @optest op1=1 op2=2 -> op1=2
2920 * @optest op1=0 op2=-42 -> op1=-42
2921 */
2922FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2923{
2924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2925 if (IEM_IS_MODRM_MEM_MODE(bRm))
2926 {
2927 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2928
2929 IEM_MC_BEGIN(0, 2);
2930 IEM_MC_LOCAL(uint64_t, uSrc);
2931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2932
2933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2935 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2936 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2937
2938 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2939 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2940
2941 IEM_MC_ADVANCE_RIP();
2942 IEM_MC_END();
2943 return VINF_SUCCESS;
2944 }
2945
2946 /**
2947 * @opdone
2948 * @opmnemonic ud660f17m3
2949 * @opcode 0x17
2950 * @opcodesub 11 mr/reg
2951 * @oppfx 0x66
2952 * @opunused immediate
2953 * @opcpuid sse
2954 * @optest ->
2955 */
2956 return IEMOP_RAISE_INVALID_OPCODE();
2957}
2958
2959
2960/**
2961 * @opdone
2962 * @opmnemonic udf30f17
2963 * @opcode 0x17
2964 * @oppfx 0xf3
2965 * @opunused intel-modrm
2966 * @opcpuid sse
2967 * @optest ->
2968 * @opdone
2969 */
2970
2971/**
2972 * @opmnemonic udf20f17
2973 * @opcode 0x17
2974 * @oppfx 0xf2
2975 * @opunused intel-modrm
2976 * @opcpuid sse
2977 * @optest ->
2978 * @opdone
2979 */
2980
2981
2982/** Opcode 0x0f 0x18. */
2983FNIEMOP_DEF(iemOp_prefetch_Grp16)
2984{
2985 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2986 if (IEM_IS_MODRM_MEM_MODE(bRm))
2987 {
2988 switch (IEM_GET_MODRM_REG_8(bRm))
2989 {
2990 case 4: /* Aliased to /0 for the time being according to AMD. */
2991 case 5: /* Aliased to /0 for the time being according to AMD. */
2992 case 6: /* Aliased to /0 for the time being according to AMD. */
2993 case 7: /* Aliased to /0 for the time being according to AMD. */
2994 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2995 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2996 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2997 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2998 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2999 }
3000
3001 IEM_MC_BEGIN(0, 1);
3002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3005 /* Currently a NOP. */
3006 NOREF(GCPtrEffSrc);
3007 IEM_MC_ADVANCE_RIP();
3008 IEM_MC_END();
3009 return VINF_SUCCESS;
3010 }
3011
3012 return IEMOP_RAISE_INVALID_OPCODE();
3013}
3014
3015
3016/** Opcode 0x0f 0x19..0x1f. */
3017FNIEMOP_DEF(iemOp_nop_Ev)
3018{
3019 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3020 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3021 if (IEM_IS_MODRM_REG_MODE(bRm))
3022 {
3023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3024 IEM_MC_BEGIN(0, 0);
3025 IEM_MC_ADVANCE_RIP();
3026 IEM_MC_END();
3027 }
3028 else
3029 {
3030 IEM_MC_BEGIN(0, 1);
3031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3034 /* Currently a NOP. */
3035 NOREF(GCPtrEffSrc);
3036 IEM_MC_ADVANCE_RIP();
3037 IEM_MC_END();
3038 }
3039 return VINF_SUCCESS;
3040}
3041
3042
3043/** Opcode 0x0f 0x20. */
3044FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3045{
3046 /* mod is ignored, as are operand size overrides. */
3047 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3048 IEMOP_HLP_MIN_386();
3049 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3050 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3051 else
3052 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3053
3054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3055 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3056 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3057 {
3058 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3059 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3060 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3061 iCrReg |= 8;
3062 }
3063 switch (iCrReg)
3064 {
3065 case 0: case 2: case 3: case 4: case 8:
3066 break;
3067 default:
3068 return IEMOP_RAISE_INVALID_OPCODE();
3069 }
3070 IEMOP_HLP_DONE_DECODING();
3071
3072 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3073}
3074
3075
3076/** Opcode 0x0f 0x21. */
3077FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3078{
3079 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3080 IEMOP_HLP_MIN_386();
3081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
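    /* There are no DR8-DR15, so using REX.R to address them raises #UD. */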
3083 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3084 return IEMOP_RAISE_INVALID_OPCODE();
3085 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
3086 IEM_GET_MODRM_RM(pVCpu, bRm),
3087 IEM_GET_MODRM_REG_8(bRm));
3088}
3089
3090
3091/** Opcode 0x0f 0x22. */
3092FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3093{
3094 /* mod is ignored, as are operand size overrides. */
3095 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3096 IEMOP_HLP_MIN_386();
3097 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3098 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3099 else
3100 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3101
3102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3103 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3104 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3105 {
3106 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3107 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3108 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3109 iCrReg |= 8;
3110 }
3111 switch (iCrReg)
3112 {
3113 case 0: case 2: case 3: case 4: case 8:
3114 break;
3115 default:
3116 return IEMOP_RAISE_INVALID_OPCODE();
3117 }
3118 IEMOP_HLP_DONE_DECODING();
3119
3120 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3121}
3122
3123
3124/** Opcode 0x0f 0x23. */
3125FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3126{
3127 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3128 IEMOP_HLP_MIN_386();
3129 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3131 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3132 return IEMOP_RAISE_INVALID_OPCODE();
3133 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
3134 IEM_GET_MODRM_REG_8(bRm),
3135 IEM_GET_MODRM_RM(pVCpu, bRm));
3136}
3137
3138
3139/** Opcode 0x0f 0x24. */
3140FNIEMOP_DEF(iemOp_mov_Rd_Td)
3141{
3142 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3143 IEMOP_HLP_MIN_386();
3144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
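    /* The test registers (TR6/TR7 on the 386, TR3-TR7 on the 486) were
       dropped with the Pentium, which raises #UD for 0F 24/26 instead. */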
3146 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3147 return IEMOP_RAISE_INVALID_OPCODE();
3148 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
3149 IEM_GET_MODRM_RM(pVCpu, bRm),
3150 IEM_GET_MODRM_REG_8(bRm));
3151}
3152
3153
3154/** Opcode 0x0f 0x26. */
3155FNIEMOP_DEF(iemOp_mov_Td_Rd)
3156{
3157 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3158 IEMOP_HLP_MIN_386();
3159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3161 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3162 return IEMOP_RAISE_INVALID_OPCODE();
3163 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
3164 IEM_GET_MODRM_REG_8(bRm),
3165 IEM_GET_MODRM_RM(pVCpu, bRm));
3166}
3167
3168
3169/**
3170 * @opcode 0x28
3171 * @oppfx none
3172 * @opcpuid sse
3173 * @opgroup og_sse_simdfp_datamove
3174 * @opxcpttype 1
3175 * @optest op1=1 op2=2 -> op1=2
3176 * @optest op1=0 op2=-42 -> op1=-42
3177 */
3178FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3179{
3180 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3182 if (IEM_IS_MODRM_REG_MODE(bRm))
3183 {
3184 /*
3185 * Register, register.
3186 */
3187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3188 IEM_MC_BEGIN(0, 0);
3189 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3190 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3191 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3192 IEM_GET_MODRM_RM(pVCpu, bRm));
3193 IEM_MC_ADVANCE_RIP();
3194 IEM_MC_END();
3195 }
3196 else
3197 {
3198 /*
3199 * Register, memory.
3200 */
3201 IEM_MC_BEGIN(0, 2);
3202 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3204
3205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3207 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3208 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3209
3210 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3211 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3212
3213 IEM_MC_ADVANCE_RIP();
3214 IEM_MC_END();
3215 }
3216 return VINF_SUCCESS;
3217}
3218
3219/**
3220 * @opcode 0x28
3221 * @oppfx 66
3222 * @opcpuid sse2
3223 * @opgroup og_sse2_pcksclr_datamove
3224 * @opxcpttype 1
3225 * @optest op1=1 op2=2 -> op1=2
3226 * @optest op1=0 op2=-42 -> op1=-42
3227 */
3228FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3229{
3230 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3232 if (IEM_IS_MODRM_REG_MODE(bRm))
3233 {
3234 /*
3235 * Register, register.
3236 */
3237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3238 IEM_MC_BEGIN(0, 0);
3239 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3240 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3241 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3242 IEM_GET_MODRM_RM(pVCpu, bRm));
3243 IEM_MC_ADVANCE_RIP();
3244 IEM_MC_END();
3245 }
3246 else
3247 {
3248 /*
3249 * Register, memory.
3250 */
3251 IEM_MC_BEGIN(0, 2);
3252 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3253 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3254
3255 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3257 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3258 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3259
3260 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3261 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3262
3263 IEM_MC_ADVANCE_RIP();
3264 IEM_MC_END();
3265 }
3266 return VINF_SUCCESS;
3267}
3268
3269/* Opcode 0xf3 0x0f 0x28 - invalid */
3270/* Opcode 0xf2 0x0f 0x28 - invalid */
3271
3272/**
3273 * @opcode 0x29
3274 * @oppfx none
3275 * @opcpuid sse
3276 * @opgroup og_sse_simdfp_datamove
3277 * @opxcpttype 1
3278 * @optest op1=1 op2=2 -> op1=2
3279 * @optest op1=0 op2=-42 -> op1=-42
3280 */
3281FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3282{
3283 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3284 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3285 if (IEM_IS_MODRM_REG_MODE(bRm))
3286 {
3287 /*
3288 * Register, register.
3289 */
3290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3291 IEM_MC_BEGIN(0, 0);
3292 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3293 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3294 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3295 IEM_GET_MODRM_REG(pVCpu, bRm));
3296 IEM_MC_ADVANCE_RIP();
3297 IEM_MC_END();
3298 }
3299 else
3300 {
3301 /*
3302 * Memory, register.
3303 */
3304 IEM_MC_BEGIN(0, 2);
3305 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3307
3308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3310 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3312
3313 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3314 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3315
3316 IEM_MC_ADVANCE_RIP();
3317 IEM_MC_END();
3318 }
3319 return VINF_SUCCESS;
3320}
3321
3322/**
3323 * @opcode 0x29
3324 * @oppfx 66
3325 * @opcpuid sse2
3326 * @opgroup og_sse2_pcksclr_datamove
3327 * @opxcpttype 1
3328 * @optest op1=1 op2=2 -> op1=2
3329 * @optest op1=0 op2=-42 -> op1=-42
3330 */
3331FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3332{
3333 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3335 if (IEM_IS_MODRM_REG_MODE(bRm))
3336 {
3337 /*
3338 * Register, register.
3339 */
3340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3341 IEM_MC_BEGIN(0, 0);
3342 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3343 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3344 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3345 IEM_GET_MODRM_REG(pVCpu, bRm));
3346 IEM_MC_ADVANCE_RIP();
3347 IEM_MC_END();
3348 }
3349 else
3350 {
3351 /*
3352 * Memory, register.
3353 */
3354 IEM_MC_BEGIN(0, 2);
3355 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3357
3358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3360 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3361 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3362
3363 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3364 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3365
3366 IEM_MC_ADVANCE_RIP();
3367 IEM_MC_END();
3368 }
3369 return VINF_SUCCESS;
3370}
3371
3372/* Opcode 0xf3 0x0f 0x29 - invalid */
3373/* Opcode 0xf2 0x0f 0x29 - invalid */
3374
3375
3376/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3377FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
3378/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3379FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
3380/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3381FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
3382/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3383FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
3384
3385
3386/**
3387 * @opcode 0x2b
3388 * @opcodesub !11 mr/reg
3389 * @oppfx none
3390 * @opcpuid sse
3391 * @opgroup og_sse1_cachect
3392 * @opxcpttype 1
3393 * @optest op1=1 op2=2 -> op1=2
3394 * @optest op1=0 op2=-42 -> op1=-42
3395 */
3396FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3397{
3398 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3399 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3400 if (IEM_IS_MODRM_MEM_MODE(bRm))
3401 {
3402 /*
3403 * Memory, register.
3404 */
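        /* The non-temporal hint only affects caching, so it is emulated
           here as a plain aligned 128-bit store. */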
3405 IEM_MC_BEGIN(0, 2);
3406 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3408
3409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3411 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3412 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3413
3414 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3415 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3416
3417 IEM_MC_ADVANCE_RIP();
3418 IEM_MC_END();
3419 }
3420 /* The register, register encoding is invalid. */
3421 else
3422 return IEMOP_RAISE_INVALID_OPCODE();
3423 return VINF_SUCCESS;
3424}
3425
3426/**
3427 * @opcode 0x2b
3428 * @opcodesub !11 mr/reg
3429 * @oppfx 0x66
3430 * @opcpuid sse2
3431 * @opgroup og_sse2_cachect
3432 * @opxcpttype 1
3433 * @optest op1=1 op2=2 -> op1=2
3434 * @optest op1=0 op2=-42 -> op1=-42
3435 */
3436FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3437{
3438 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3440 if (IEM_IS_MODRM_MEM_MODE(bRm))
3441 {
3442 /*
3443 * Memory, register.
3444 */
3445 IEM_MC_BEGIN(0, 2);
3446 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3448
3449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3451 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3453
3454 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3455 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3456
3457 IEM_MC_ADVANCE_RIP();
3458 IEM_MC_END();
3459 }
3460 /* The register, register encoding is invalid. */
3461 else
3462 return IEMOP_RAISE_INVALID_OPCODE();
3463 return VINF_SUCCESS;
3464}
3465/* Opcode 0xf3 0x0f 0x2b - invalid */
3466/* Opcode 0xf2 0x0f 0x2b - invalid */
3467
3468
3469/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3470FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
3471/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
3472FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
3473/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
3474FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
3475/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
3476FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
3477
3478/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
3479FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
3480/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
3481FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
3482/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
3483FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
3484/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
3485FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
3486
3487/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
3488FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
3489/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
3490FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
3491/* Opcode 0xf3 0x0f 0x2e - invalid */
3492/* Opcode 0xf2 0x0f 0x2e - invalid */
3493
3494/** Opcode 0x0f 0x2f - comiss Vss, Wss */
3495FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
3496/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
3497FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
3498/* Opcode 0xf3 0x0f 0x2f - invalid */
3499/* Opcode 0xf2 0x0f 0x2f - invalid */
3500
3501/** Opcode 0x0f 0x30. */
3502FNIEMOP_DEF(iemOp_wrmsr)
3503{
3504 IEMOP_MNEMONIC(wrmsr, "wrmsr");
3505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3506 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
3507}
3508
3509
3510/** Opcode 0x0f 0x31. */
3511FNIEMOP_DEF(iemOp_rdtsc)
3512{
3513 IEMOP_MNEMONIC(rdtsc, "rdtsc");
3514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3515 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
3516}
3517
3518
3519/** Opcode 0x0f 0x32. */
3520FNIEMOP_DEF(iemOp_rdmsr)
3521{
3522 IEMOP_MNEMONIC(rdmsr, "rdmsr");
3523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3524 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
3525}
3526
3527
3528/** Opcode 0x0f 0x33. */
3529FNIEMOP_DEF(iemOp_rdpmc)
3530{
3531 IEMOP_MNEMONIC(rdpmc, "rdpmc");
3532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3533 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
3534}
3535
3536
3537/** Opcode 0x0f 0x34. */
3538FNIEMOP_DEF(iemOp_sysenter)
3539{
3540 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
3541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3542 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
3543}
3544
3545/** Opcode 0x0f 0x35. */
3546FNIEMOP_DEF(iemOp_sysexit)
3547{
3548 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
3549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3550 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
3551}
3552
3553/** Opcode 0x0f 0x37. */
3554FNIEMOP_STUB(iemOp_getsec);
3555
3556
3557/** Opcode 0x0f 0x38. */
3558FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
3559{
3560#ifdef IEM_WITH_THREE_0F_38
3561 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3562 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
3563#else
3564 IEMOP_BITCH_ABOUT_STUB();
3565 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3566#endif
3567}
3568
3569
3570/** Opcode 0x0f 0x3a. */
3571FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
3572{
3573#ifdef IEM_WITH_THREE_0F_3A
3574 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3575 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
3576#else
3577 IEMOP_BITCH_ABOUT_STUB();
3578 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3579#endif
3580}
3581
3582
3583/**
3584 * Implements a conditional move.
3585 *
3586 * Wish there were an obvious way to do this where we could share and reduce
3587 * code bloat.
3588 *
3589 * @param a_Cnd The conditional "microcode" operation.
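 * @remarks In 64-bit mode a 32-bit destination is written (and thereby
 *          zero-extended) even when the condition is false, hence the
 *          IEM_MC_ELSE() clearing the high half in the 32-bit cases.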
3590 */
3591#define CMOV_X(a_Cnd) \
3592 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
3593 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3594 { \
3595 switch (pVCpu->iem.s.enmEffOpSize) \
3596 { \
3597 case IEMMODE_16BIT: \
3598 IEM_MC_BEGIN(0, 1); \
3599 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3600 a_Cnd { \
3601 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3602 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3603 } IEM_MC_ENDIF(); \
3604 IEM_MC_ADVANCE_RIP(); \
3605 IEM_MC_END(); \
3606 return VINF_SUCCESS; \
3607 \
3608 case IEMMODE_32BIT: \
3609 IEM_MC_BEGIN(0, 1); \
3610 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3611 a_Cnd { \
3612 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3613 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3614 } IEM_MC_ELSE() { \
3615 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3616 } IEM_MC_ENDIF(); \
3617 IEM_MC_ADVANCE_RIP(); \
3618 IEM_MC_END(); \
3619 return VINF_SUCCESS; \
3620 \
3621 case IEMMODE_64BIT: \
3622 IEM_MC_BEGIN(0, 1); \
3623 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3624 a_Cnd { \
3625 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3626 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3627 } IEM_MC_ENDIF(); \
3628 IEM_MC_ADVANCE_RIP(); \
3629 IEM_MC_END(); \
3630 return VINF_SUCCESS; \
3631 \
3632 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3633 } \
3634 } \
3635 else \
3636 { \
3637 switch (pVCpu->iem.s.enmEffOpSize) \
3638 { \
3639 case IEMMODE_16BIT: \
3640 IEM_MC_BEGIN(0, 2); \
3641 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3642 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3643 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3644 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3645 a_Cnd { \
3646 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3647 } IEM_MC_ENDIF(); \
3648 IEM_MC_ADVANCE_RIP(); \
3649 IEM_MC_END(); \
3650 return VINF_SUCCESS; \
3651 \
3652 case IEMMODE_32BIT: \
3653 IEM_MC_BEGIN(0, 2); \
3654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3655 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3657 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3658 a_Cnd { \
3659 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3660 } IEM_MC_ELSE() { \
3661 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3662 } IEM_MC_ENDIF(); \
3663 IEM_MC_ADVANCE_RIP(); \
3664 IEM_MC_END(); \
3665 return VINF_SUCCESS; \
3666 \
3667 case IEMMODE_64BIT: \
3668 IEM_MC_BEGIN(0, 2); \
3669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3670 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3672 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3673 a_Cnd { \
3674 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3675 } IEM_MC_ENDIF(); \
3676 IEM_MC_ADVANCE_RIP(); \
3677 IEM_MC_END(); \
3678 return VINF_SUCCESS; \
3679 \
3680 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3681 } \
3682 } do {} while (0)
3683
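/*
 * A minimal sketch of what CMOV_X expands to for the 32-bit register,
 * register form of cmove Gv,Ev (derived from the macro above; not a
 * literal preprocessor dump):
 *
 *     IEM_MC_BEGIN(0, 1);
 *     IEM_MC_LOCAL(uint32_t, u32Tmp);
 *     IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
 *         IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
 *         IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
 *     } IEM_MC_ELSE() {
 *         IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
 *     } IEM_MC_ENDIF();
 *     IEM_MC_ADVANCE_RIP();
 *     IEM_MC_END();
 *     return VINF_SUCCESS;
 */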
3684
3685
3686/** Opcode 0x0f 0x40. */
3687FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
3688{
3689 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
3690 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
3691}
3692
3693
3694/** Opcode 0x0f 0x41. */
3695FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
3696{
3697 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
3698 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
3699}
3700
3701
3702/** Opcode 0x0f 0x42. */
3703FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
3704{
3705 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
3706 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
3707}
3708
3709
3710/** Opcode 0x0f 0x43. */
3711FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
3712{
3713 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
3714 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
3715}
3716
3717
3718/** Opcode 0x0f 0x44. */
3719FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
3720{
3721 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
3722 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
3723}
3724
3725
3726/** Opcode 0x0f 0x45. */
3727FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
3728{
3729 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
3730 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
3731}
3732
3733
3734/** Opcode 0x0f 0x46. */
3735FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
3736{
3737 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
3738 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
3739}
3740
3741
3742/** Opcode 0x0f 0x47. */
3743FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
3744{
3745 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
3746 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
3747}
3748
3749
3750/** Opcode 0x0f 0x48. */
3751FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
3752{
3753 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
3754 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
3755}
3756
3757
3758/** Opcode 0x0f 0x49. */
3759FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
3760{
3761 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
3762 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
3763}
3764
3765
3766/** Opcode 0x0f 0x4a. */
3767FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
3768{
3769 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
3770 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
3771}
3772
3773
3774/** Opcode 0x0f 0x4b. */
3775FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
3776{
3777 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
3778 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
3779}
3780
3781
3782/** Opcode 0x0f 0x4c. */
3783FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
3784{
3785 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
3786 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
3787}
3788
3789
3790/** Opcode 0x0f 0x4d. */
3791FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
3792{
3793 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
3794 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
3795}
3796
3797
3798/** Opcode 0x0f 0x4e. */
3799FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
3800{
3801 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
3802 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
3803}
3804
3805
3806/** Opcode 0x0f 0x4f. */
3807FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
3808{
3809 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
3810 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
3811}
3812
3813#undef CMOV_X
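/*
 * For reference, a minimal standalone sketch (hypothetical names, not part of
 * the IEM infrastructure) of the EFLAGS predicates a few of the CMOV_X
 * instantiations above encode; the mask values match the architectural
 * X86_EFL_* bit positions.
 */
#if 0 /* illustrative sketch only */
# include <stdint.h>
# include <stdbool.h>

# define SKETCH_EFL_CF UINT32_C(0x0001)
# define SKETCH_EFL_ZF UINT32_C(0x0040)
# define SKETCH_EFL_SF UINT32_C(0x0080)
# define SKETCH_EFL_OF UINT32_C(0x0800)

/* cmovl (0x0f 0x4c): move when SF != OF. */
static bool sketchCmovlTaken(uint32_t fEfl)
{
    return ((fEfl & SKETCH_EFL_SF) != 0) != ((fEfl & SKETCH_EFL_OF) != 0);
}

/* cmovle (0x0f 0x4e): move when ZF is set or SF != OF. */
static bool sketchCmovleTaken(uint32_t fEfl)
{
    return (fEfl & SKETCH_EFL_ZF) != 0 || sketchCmovlTaken(fEfl);
}

/* cmovbe (0x0f 0x46): move when CF or ZF is set. */
static bool sketchCmovbeTaken(uint32_t fEfl)
{
    return (fEfl & (SKETCH_EFL_CF | SKETCH_EFL_ZF)) != 0;
}
#endif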
3814
3815/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
3816FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
3817/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
3818FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
3819/* Opcode 0xf3 0x0f 0x50 - invalid */
3820/* Opcode 0xf2 0x0f 0x50 - invalid */
3821
3822/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
3823FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
3824/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
3825FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
3826/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
3827FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
3828/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
3829FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
3830
3831/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
3832FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
3833/* Opcode 0x66 0x0f 0x52 - invalid */
3834/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
3835FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
3836/* Opcode 0xf2 0x0f 0x52 - invalid */
3837
3838/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
3839FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
3840/* Opcode 0x66 0x0f 0x53 - invalid */
3841/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
3842FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
3843/* Opcode 0xf2 0x0f 0x53 - invalid */
3844
3845
3846/** Opcode 0x0f 0x54 - andps Vps, Wps */
3847FNIEMOP_DEF(iemOp_andps_Vps_Wps)
3848{
3849 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3850 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
3851}
3852
3853
3854/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
3855FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
3856{
3857 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3858 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
3859}
3860
3861
3862/* Opcode 0xf3 0x0f 0x54 - invalid */
3863/* Opcode 0xf2 0x0f 0x54 - invalid */
3864
3865
3866/** Opcode 0x0f 0x55 - andnps Vps, Wps */
3867FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
3868{
3869 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3870 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
3871}
3872
3873
3874/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
3875FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
3876{
3877 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3878 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
3879}
3880
3881
3882/* Opcode 0xf3 0x0f 0x55 - invalid */
3883/* Opcode 0xf2 0x0f 0x55 - invalid */
3884
3885
3886/** Opcode 0x0f 0x56 - orps Vps, Wps */
3887FNIEMOP_DEF(iemOp_orps_Vps_Wps)
3888{
3889 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3890 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
3891}
3892
3893
3894/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
3895FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
3896{
3897 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3898 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
3899}
3900
3901
3902/* Opcode 0xf3 0x0f 0x56 - invalid */
3903/* Opcode 0xf2 0x0f 0x56 - invalid */
3904
3905
3906/** Opcode 0x0f 0x57 - xorps Vps, Wps */
3907FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
3908{
3909 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3910 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
3911}
3912
3913
3914/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
3915FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
3916{
3917 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3918 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
3919}
3920
3921
3922/* Opcode 0xf3 0x0f 0x57 - invalid */
3923/* Opcode 0xf2 0x0f 0x57 - invalid */
3924
3925/** Opcode 0x0f 0x58 - addps Vps, Wps */
3926FNIEMOP_DEF(iemOp_addps_Vps_Wps)
3927{
3928 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3929 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
3930}
3931
3932
3933/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
3934FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
3935{
3936 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3937 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
3938}
3939
3940
3941/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
3942FNIEMOP_STUB(iemOp_addss_Vss_Wss);
3943/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
3944FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
3945
3946
3947/** Opcode 0x0f 0x59 - mulps Vps, Wps */
3948FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
3949{
3950 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3951 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
3952}
3953
3954
3955/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
3956FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
3957{
3958 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3959 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
3960}
3961
3962
3963/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
3964FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
3965/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
3966FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
3967
3968/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3969FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3970/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3971FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3972/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3973FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3974/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3975FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3976
3977/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3978FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3979/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3980FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3981/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3982FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3983/* Opcode 0xf2 0x0f 0x5b - invalid */
3984
3985
3986/** Opcode 0x0f 0x5c - subps Vps, Wps */
3987FNIEMOP_DEF(iemOp_subps_Vps_Wps)
3988{
3989 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3990 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
3991}
3992
3993
3994/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
3995FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
3996{
3997 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3998 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
3999}
4000
4001
4002/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
4003FNIEMOP_STUB(iemOp_subss_Vss_Wss);
4004/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
4005FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
4006
4007
4008/** Opcode 0x0f 0x5d - minps Vps, Wps */
4009FNIEMOP_DEF(iemOp_minps_Vps_Wps)
4010{
4011 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4012 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
4013}
4014
4015
4016/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
4017FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
4018{
4019 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4020 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
4021}
4022
4023
4024/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
4025FNIEMOP_STUB(iemOp_minss_Vss_Wss);
4026/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
4027FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
4028
4029
4030/** Opcode 0x0f 0x5e - divps Vps, Wps */
4031FNIEMOP_DEF(iemOp_divps_Vps_Wps)
4032{
4033 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4034 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
4035}
4036
4037
4038/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
4039FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
4040{
4041 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4042 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
4043}
4044
4045
4046/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
4047FNIEMOP_STUB(iemOp_divss_Vss_Wss);
4048/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
4049FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
4050
4051/** Opcode 0x0f 0x5f - maxps Vps, Wps */
4052FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
4053/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
4054FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
4055/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
4056FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
4057/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
4058FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
4059
4060
4061/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
4062FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
4063{
4064 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4065 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
4066}
4067
4068
4069/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
4070FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
4071{
4072 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4073 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
4074}
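/*
 * A minimal standalone sketch of the byte interleave performed by the
 * low-half unpack workers called above, shown for the 64-bit (MMX) operand
 * size; the function name is hypothetical and independent of IEM.
 */
#if 0 /* illustrative sketch only */
# include <stdint.h>
static uint64_t sketchPunpcklbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        uint64_t bDst = (uDst >> (i * 8)) & 0xff;   /* byte i of the destination */
        uint64_t bSrc = (uSrc >> (i * 8)) & 0xff;   /* byte i of the source */
        uResult |= bDst << (i * 16);                /* even result bytes come from dst */
        uResult |= bSrc << (i * 16 + 8);            /* odd result bytes come from src */
    }
    return uResult;
}
#endif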
4075
4076
4077/* Opcode 0xf3 0x0f 0x60 - invalid */
4078
4079
4080/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
4081FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
4082{
4083 /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
4084 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4085 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
4086}
4087
4088
4089/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
4090FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
4091{
4092 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4093 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
4094}
4095
4096
4097/* Opcode 0xf3 0x0f 0x61 - invalid */
4098
4099
4100/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
4101FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
4102{
4103 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4104 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
4105}
4106
4107
4108/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
4109FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
4110{
4111 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4112 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
4113}
4114
4115
4116/* Opcode 0xf3 0x0f 0x62 - invalid */
4117
4118
4119
4120/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
4121FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
4122{
4123 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4124 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
4125}
4126
4127
4128/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
4129FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
4130{
4131 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4132 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
4133}
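/*
 * A minimal standalone sketch of the signed word-to-byte saturation that
 * packsswb applies to each source lane; hypothetical name, plain C.  The
 * 64-bit form narrows the four destination words followed by the four
 * source words into the eight result bytes.
 */
#if 0 /* illustrative sketch only */
# include <stdint.h>
static int8_t sketchSatS16ToS8(int16_t iVal)
{
    if (iVal > INT8_MAX)
        return INT8_MAX;    /* 0x7f: positive overflow saturates */
    if (iVal < INT8_MIN)
        return INT8_MIN;    /* 0x80: negative overflow saturates */
    return (int8_t)iVal;
}
#endif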
4134
4135
4136/* Opcode 0xf3 0x0f 0x63 - invalid */
4137
4138
4139/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
4140FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
4141{
4142 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4143 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
4144}
4145
4146
4147/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
4148FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
4149{
4150 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4151 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
4152}
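/*
 * A minimal standalone sketch of the signed per-byte compare behind pcmpgtb,
 * shown for the 64-bit operand size: each lane becomes all ones when the
 * destination byte is greater, otherwise all zero.  Hypothetical name.
 */
#if 0 /* illustrative sketch only */
# include <stdint.h>
static uint64_t sketchPcmpgtbU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 8; i++)
    {
        int8_t iDst = (int8_t)(uDst >> (i * 8));    /* signed byte lane i */
        int8_t iSrc = (int8_t)(uSrc >> (i * 8));
        if (iDst > iSrc)
            uResult |= UINT64_C(0xff) << (i * 8);   /* lane mask: all ones */
    }
    return uResult;
}
#endif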
4153
4154
4155/* Opcode 0xf3 0x0f 0x64 - invalid */
4156
4157
4158/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
4159FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
4160{
4161 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4162 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
4163}
4164
4165
4166/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
4167FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
4168{
4169 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4170 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
4171}
4172
4173
4174/* Opcode 0xf3 0x0f 0x65 - invalid */
4175
4176
4177/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
4178FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
4179{
4180 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4181 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
4182}
4183
4184
4185/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
4186FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
4187{
4188 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4189 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
4190}
4191
4192
4193/* Opcode 0xf3 0x0f 0x66 - invalid */
4194
4195
4196/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
4197FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
4198{
4199 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4200 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
4201}
4202
4203
4204/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
4205FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
4206{
4207 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4208 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
4209}
4210
4211
4212/* Opcode 0xf3 0x0f 0x67 - invalid */
4213
4214
4215/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
4216 * @note Intel and AMD both use Qd for the second parameter; however, they
4217 * both list it as an mmX/mem64 operand and Intel describes it as being
4218 * loaded as a qword, so it should be Qq, shouldn't it? */
4219FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
4220{
4221 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4222 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
4223}
4224
4225
4226/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
4227FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
4228{
4229 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4230 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
4231}
4232
4233
4234/* Opcode 0xf3 0x0f 0x68 - invalid */
4235
4236
4237/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
4238 * @note Intel and AMD both use Qd for the second parameter; however, they
4239 * both list it as an mmX/mem64 operand and Intel describes it as being
4240 * loaded as a qword, so it should be Qq, shouldn't it? */
4241FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
4242{
4243 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4244 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
4245}
4246
4247
4248/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
4249FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
4250{
4251 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4252 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
4253
4254}
4255
4256
4257/* Opcode 0xf3 0x0f 0x69 - invalid */
4258
4259
4260/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
4261 * @note Intel and AMD both use Qd for the second parameter; however, they
4262 * both list it as an mmX/mem64 operand and Intel describes it as being
4263 * loaded as a qword, so it should be Qq, shouldn't it? */
4264FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
4265{
4266 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4267 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
4268}
4269
4270
4271/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
4272FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
4273{
4274 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4275 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
4276}
4277
4278
4279/* Opcode 0xf3 0x0f 0x6a - invalid */
4280
4281
4282/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
4283FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
4284{
4285 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4286 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
4287}
4288
4289
4290/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
4291FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
4292{
4293 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4294 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
4295}
4296
4297
4298/* Opcode 0xf3 0x0f 0x6b - invalid */
4299
4300
4301/* Opcode 0x0f 0x6c - invalid */
4302
4303
4304/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
4305FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
4306{
4307 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4308 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
4309}
4310
4311
4312/* Opcode 0xf3 0x0f 0x6c - invalid */
4313/* Opcode 0xf2 0x0f 0x6c - invalid */
4314
4315
4316/* Opcode 0x0f 0x6d - invalid */
4317
4318
4319/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
4320FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
4321{
4322 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4323 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
4324}
4325
4326
4327/* Opcode 0xf3 0x0f 0x6d - invalid */
4328
4329
4330FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
4331{
4332 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4333 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4334 {
4335 /**
4336 * @opcode 0x6e
4337 * @opcodesub rex.w=1
4338 * @oppfx none
4339 * @opcpuid mmx
4340 * @opgroup og_mmx_datamove
4341 * @opxcpttype 5
4342 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4343 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4344 */
4345 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
4346 if (IEM_IS_MODRM_REG_MODE(bRm))
4347 {
4348 /* MMX, greg64 */
4349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4350 IEM_MC_BEGIN(0, 1);
4351 IEM_MC_LOCAL(uint64_t, u64Tmp);
4352
4353 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4354 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4355
4356 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4357 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4358 IEM_MC_FPU_TO_MMX_MODE();
4359
4360 IEM_MC_ADVANCE_RIP();
4361 IEM_MC_END();
4362 }
4363 else
4364 {
4365 /* MMX, [mem64] */
4366 IEM_MC_BEGIN(0, 2);
4367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4368 IEM_MC_LOCAL(uint64_t, u64Tmp);
4369
4370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4372 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4373 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4374
4375 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4376 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4377 IEM_MC_FPU_TO_MMX_MODE();
4378
4379 IEM_MC_ADVANCE_RIP();
4380 IEM_MC_END();
4381 }
4382 }
4383 else
4384 {
4385 /**
4386 * @opdone
4387 * @opcode 0x6e
4388 * @opcodesub rex.w=0
4389 * @oppfx none
4390 * @opcpuid mmx
4391 * @opgroup og_mmx_datamove
4392 * @opxcpttype 5
4393 * @opfunction iemOp_movd_q_Pd_Ey
4394 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4395 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4396 */
4397 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
4398 if (IEM_IS_MODRM_REG_MODE(bRm))
4399 {
4400 /* MMX, greg */
4401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4402 IEM_MC_BEGIN(0, 1);
4403 IEM_MC_LOCAL(uint64_t, u64Tmp);
4404
4405 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4406 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4407
4408 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4409 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4410 IEM_MC_FPU_TO_MMX_MODE();
4411
4412 IEM_MC_ADVANCE_RIP();
4413 IEM_MC_END();
4414 }
4415 else
4416 {
4417 /* MMX, [mem] */
4418 IEM_MC_BEGIN(0, 2);
4419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4420 IEM_MC_LOCAL(uint32_t, u32Tmp);
4421
4422 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4424 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4425 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4426
4427 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4428 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
4429 IEM_MC_FPU_TO_MMX_MODE();
4430
4431 IEM_MC_ADVANCE_RIP();
4432 IEM_MC_END();
4433 }
4434 }
4435 return VINF_SUCCESS;
4436}
4437
4438FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
4439{
4440 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4441 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4442 {
4443 /**
4444 * @opcode 0x6e
4445 * @opcodesub rex.w=1
4446 * @oppfx 0x66
4447 * @opcpuid sse2
4448 * @opgroup og_sse2_simdint_datamove
4449 * @opxcpttype 5
4450 * @optest 64-bit / op1=1 op2=2 -> op1=2
4451 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4452 */
4453 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
4454 if (IEM_IS_MODRM_REG_MODE(bRm))
4455 {
4456 /* XMM, greg64 */
4457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4458 IEM_MC_BEGIN(0, 1);
4459 IEM_MC_LOCAL(uint64_t, u64Tmp);
4460
4461 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4462 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4463
4464 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4465 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4466
4467 IEM_MC_ADVANCE_RIP();
4468 IEM_MC_END();
4469 }
4470 else
4471 {
4472 /* XMM, [mem64] */
4473 IEM_MC_BEGIN(0, 2);
4474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4475 IEM_MC_LOCAL(uint64_t, u64Tmp);
4476
4477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4479 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4480 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4481
4482 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4483 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4484
4485 IEM_MC_ADVANCE_RIP();
4486 IEM_MC_END();
4487 }
4488 }
4489 else
4490 {
4491 /**
4492 * @opdone
4493 * @opcode 0x6e
4494 * @opcodesub rex.w=0
4495 * @oppfx 0x66
4496 * @opcpuid sse2
4497 * @opgroup og_sse2_simdint_datamove
4498 * @opxcpttype 5
4499 * @opfunction iemOp_movd_q_Vy_Ey
4500 * @optest op1=1 op2=2 -> op1=2
4501 * @optest op1=0 op2=-42 -> op1=-42
4502 */
4503 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
4504 if (IEM_IS_MODRM_REG_MODE(bRm))
4505 {
4506 /* XMM, greg32 */
4507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4508 IEM_MC_BEGIN(0, 1);
4509 IEM_MC_LOCAL(uint32_t, u32Tmp);
4510
4511 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4512 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4513
4514 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4515 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4516
4517 IEM_MC_ADVANCE_RIP();
4518 IEM_MC_END();
4519 }
4520 else
4521 {
4522 /* XMM, [mem32] */
4523 IEM_MC_BEGIN(0, 2);
4524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4525 IEM_MC_LOCAL(uint32_t, u32Tmp);
4526
4527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4529 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4530 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4531
4532 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4533 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4534
4535 IEM_MC_ADVANCE_RIP();
4536 IEM_MC_END();
4537 }
4538 }
4539 return VINF_SUCCESS;
4540}
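/*
 * A minimal standalone sketch of the zero-extending XMM stores used above
 * (IEM_MC_STORE_XREG_U32_ZX_U128 / _U64_ZX_U128): the scalar lands in the
 * low bits and the rest of the 128-bit register is cleared.  Hypothetical
 * name, with the register modelled as two quadwords.
 */
#if 0 /* illustrative sketch only */
# include <stdint.h>
static void sketchStoreXregU32ZxU128(uint64_t auXmm[2], uint32_t uVal)
{
    auXmm[0] = uVal;    /* low dword set, bits 32..63 zeroed */
    auXmm[1] = 0;       /* high quadword zeroed */
}
#endif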
4541
4542/* Opcode 0xf3 0x0f 0x6e - invalid */
4543
4544
4545/**
4546 * @opcode 0x6f
4547 * @oppfx none
4548 * @opcpuid mmx
4549 * @opgroup og_mmx_datamove
4550 * @opxcpttype 5
4551 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4552 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4553 */
4554FNIEMOP_DEF(iemOp_movq_Pq_Qq)
4555{
4556 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4558 if (IEM_IS_MODRM_REG_MODE(bRm))
4559 {
4560 /*
4561 * Register, register.
4562 */
4563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4564 IEM_MC_BEGIN(0, 1);
4565 IEM_MC_LOCAL(uint64_t, u64Tmp);
4566
4567 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4568 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4569
4570 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
4571 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4572 IEM_MC_FPU_TO_MMX_MODE();
4573
4574 IEM_MC_ADVANCE_RIP();
4575 IEM_MC_END();
4576 }
4577 else
4578 {
4579 /*
4580 * Register, memory.
4581 */
4582 IEM_MC_BEGIN(0, 2);
4583 IEM_MC_LOCAL(uint64_t, u64Tmp);
4584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4585
4586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4588 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4589 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4590
4591 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4592 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4593 IEM_MC_FPU_TO_MMX_MODE();
4594
4595 IEM_MC_ADVANCE_RIP();
4596 IEM_MC_END();
4597 }
4598 return VINF_SUCCESS;
4599}
4600
4601/**
4602 * @opcode 0x6f
4603 * @oppfx 0x66
4604 * @opcpuid sse2
4605 * @opgroup og_sse2_simdint_datamove
4606 * @opxcpttype 1
4607 * @optest op1=1 op2=2 -> op1=2
4608 * @optest op1=0 op2=-42 -> op1=-42
4609 */
4610FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
4611{
4612 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4613 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4614 if (IEM_IS_MODRM_REG_MODE(bRm))
4615 {
4616 /*
4617 * Register, register.
4618 */
4619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4620 IEM_MC_BEGIN(0, 0);
4621
4622 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4623 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4624
4625 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4626 IEM_GET_MODRM_RM(pVCpu, bRm));
4627 IEM_MC_ADVANCE_RIP();
4628 IEM_MC_END();
4629 }
4630 else
4631 {
4632 /*
4633 * Register, memory.
4634 */
4635 IEM_MC_BEGIN(0, 2);
4636 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4638
4639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4641 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4642 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4643
4644 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4645 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4646
4647 IEM_MC_ADVANCE_RIP();
4648 IEM_MC_END();
4649 }
4650 return VINF_SUCCESS;
4651}
4652
4653/**
4654 * @opcode 0x6f
4655 * @oppfx 0xf3
4656 * @opcpuid sse2
4657 * @opgroup og_sse2_simdint_datamove
4658 * @opxcpttype 4UA
4659 * @optest op1=1 op2=2 -> op1=2
4660 * @optest op1=0 op2=-42 -> op1=-42
4661 */
4662FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
4663{
4664 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4665 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4666 if (IEM_IS_MODRM_REG_MODE(bRm))
4667 {
4668 /*
4669 * Register, register.
4670 */
4671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4672 IEM_MC_BEGIN(0, 0);
4673 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4674 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4675 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4676 IEM_GET_MODRM_RM(pVCpu, bRm));
4677 IEM_MC_ADVANCE_RIP();
4678 IEM_MC_END();
4679 }
4680 else
4681 {
4682 /*
4683 * Register, memory.
4684 */
4685 IEM_MC_BEGIN(0, 2);
4686 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4687 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4688
4689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4691 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4692 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4693 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4694 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4695
4696 IEM_MC_ADVANCE_RIP();
4697 IEM_MC_END();
4698 }
4699 return VINF_SUCCESS;
4700}
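/*
 * The only difference between the movdqa and movdqu memory paths above is
 * the aligned fetch (IEM_MC_FETCH_MEM_U128_ALIGN_SSE vs IEM_MC_FETCH_MEM_U128):
 * movdqa faults on a misaligned operand while movdqu accepts any address.
 * A minimal sketch of that alignment predicate, hypothetical name:
 */
#if 0 /* illustrative sketch only */
# include <stdint.h>
# include <stdbool.h>
static bool sketchSse16ByteAligned(uint64_t GCPtrMem)
{
    return (GCPtrMem & 15) == 0;    /* movdqa requires this; movdqu does not */
}
#endif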
4701
4702
4703/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
4704FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
4705{
4706 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4708 if (IEM_IS_MODRM_REG_MODE(bRm))
4709 {
4710 /*
4711 * Register, register.
4712 */
4713 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4715
4716 IEM_MC_BEGIN(3, 0);
4717 IEM_MC_ARG(uint64_t *, pDst, 0);
4718 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4719 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4720 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
4721 IEM_MC_PREPARE_FPU_USAGE();
4722 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4723 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
4724 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
4725 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4726 IEM_MC_FPU_TO_MMX_MODE();
4727 IEM_MC_ADVANCE_RIP();
4728 IEM_MC_END();
4729 }
4730 else
4731 {
4732 /*
4733 * Register, memory.
4734 */
4735 IEM_MC_BEGIN(3, 2);
4736 IEM_MC_ARG(uint64_t *, pDst, 0);
4737 IEM_MC_LOCAL(uint64_t, uSrc);
4738 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4740
4741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4742 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4743 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4745 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
4746
4747 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4748 IEM_MC_PREPARE_FPU_USAGE();
4749 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4750 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
4751 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4752 IEM_MC_FPU_TO_MMX_MODE();
4753
4754 IEM_MC_ADVANCE_RIP();
4755 IEM_MC_END();
4756 }
4757 return VINF_SUCCESS;
4758}
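/*
 * A minimal standalone sketch of the word shuffle pshufw performs: each
 * 16-bit result lane i is selected from the source by the two-bit field
 * imm8[2*i+1:2*i].  Hypothetical name, plain C.
 */
#if 0 /* illustrative sketch only */
# include <stdint.h>
static uint64_t sketchPshufwU64(uint64_t uSrc, uint8_t bImm)
{
    uint64_t uResult = 0;
    for (unsigned i = 0; i < 4; i++)
    {
        unsigned iSel  = (bImm >> (i * 2)) & 3;             /* 2-bit source lane selector */
        uint64_t uWord = (uSrc >> (iSel * 16)) & 0xffff;
        uResult |= uWord << (i * 16);
    }
    return uResult;
}
#endif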
4759
4760
4761/**
4762 * Common worker for SSE2 instructions on the forms:
4763 * pshufd xmm1, xmm2/mem128, imm8
4764 * pshufhw xmm1, xmm2/mem128, imm8
4765 * pshuflw xmm1, xmm2/mem128, imm8
4766 *
4767 * Proper alignment of the 128-bit operand is enforced.
4768 * Exceptions type 4. SSE2 cpuid checks.
4769 */
4770FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
4771{
4772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4773 if (IEM_IS_MODRM_REG_MODE(bRm))
4774 {
4775 /*
4776 * Register, register.
4777 */
4778 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4780
4781 IEM_MC_BEGIN(3, 0);
4782 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4783 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4784 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4785 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4786 IEM_MC_PREPARE_SSE_USAGE();
4787 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4788 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4789 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
4790 IEM_MC_ADVANCE_RIP();
4791 IEM_MC_END();
4792 }
4793 else
4794 {
4795 /*
4796 * Register, memory.
4797 */
4798 IEM_MC_BEGIN(3, 2);
4799 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4800 IEM_MC_LOCAL(RTUINT128U, uSrc);
4801 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
4802 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4803
4804 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4805 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4806 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4808 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4809
4810 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4811 IEM_MC_PREPARE_SSE_USAGE();
4812 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4813 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
4814
4815 IEM_MC_ADVANCE_RIP();
4816 IEM_MC_END();
4817 }
4818 return VINF_SUCCESS;
4819}
4820
4821
4822/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
4823FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
4824{
4825 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4826 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
4827}
4828
4829
4830/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
4831FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
4832{
4833 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4834 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
4835}
4836
4837
4838/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
4839FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
4840{
4841 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4842 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
4843}
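/*
 * Sketch of how the three variants dispatched above differ: pshufd shuffles
 * all four dwords, while pshufhw/pshuflw shuffle only one half and copy the
 * other quadword through.  Shown for pshuflw, reusing the word shuffle
 * sketched for pshufw earlier; hypothetical names.
 */
#if 0 /* illustrative sketch only */
# include <stdint.h>
static void sketchPshuflwU128(uint64_t auDst[2], uint64_t const auSrc[2], uint8_t bImm)
{
    auDst[0] = sketchPshufwU64(auSrc[0], bImm); /* low four words shuffled by imm8 */
    auDst[1] = auSrc[1];                        /* high quadword copied unchanged */
}
#endif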
4844
4845
4846/**
4847 * Common worker for MMX instructions of the form:
4848 * psrlw mm, imm8
4849 * psraw mm, imm8
4850 * psllw mm, imm8
4851 * psrld mm, imm8
4852 * psrad mm, imm8
4853 * pslld mm, imm8
4854 * psrlq mm, imm8
4855 * psllq mm, imm8
4856 *
4857 */
4858FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
4859{
4860 if (IEM_IS_MODRM_REG_MODE(bRm))
4861 {
4862 /*
4863 * Register, immediate.
4864 */
4865 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4867
4868 IEM_MC_BEGIN(2, 0);
4869 IEM_MC_ARG(uint64_t *, pDst, 0);
4870 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
4871 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4872 IEM_MC_PREPARE_FPU_USAGE();
4873 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4874 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
4875 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4876 IEM_MC_FPU_TO_MMX_MODE();
4877 IEM_MC_ADVANCE_RIP();
4878 IEM_MC_END();
4879 }
4880 else
4881 {
4882 /*
4883 * Register, memory not supported.
4884 */
4885 /// @todo Caller already enforced register mode?!
4886 }
4887 return VINF_SUCCESS;
4888}
4889
4890
4891/**
4892 * Common worker for SSE2 instructions of the form:
4893 * psrlw xmm, imm8
4894 * psraw xmm, imm8
4895 * psllw xmm, imm8
4896 * psrld xmm, imm8
4897 * psrad xmm, imm8
4898 * pslld xmm, imm8
4899 * psrlq xmm, imm8
4900 * psllq xmm, imm8
4901 *
4902 */
4903FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
4904{
4905 if (IEM_IS_MODRM_REG_MODE(bRm))
4906 {
4907 /*
4908 * Register, immediate.
4909 */
4910 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4912
4913 IEM_MC_BEGIN(2, 0);
4914 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4915 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
4916 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4917 IEM_MC_PREPARE_SSE_USAGE();
4918 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4919 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
4920 IEM_MC_ADVANCE_RIP();
4921 IEM_MC_END();
4922 }
4923 else
4924 {
4925 /*
4926 * Register, memory not supported.
4927 */
4928 /// @todo Caller already enforced register mode?!
4929 }
4930 return VINF_SUCCESS;
4931}
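/*
 * A minimal standalone sketch of the per-lane logical right shift these
 * immediate workers implement, shown for psrlw on the 64-bit operand size;
 * counts of 16 or more clear every lane.  Hypothetical name, plain C
 * (SWAR style: one wide shift plus a replicated per-lane mask).
 */
#if 0 /* illustrative sketch only */
# include <stdint.h>
static uint64_t sketchPsrlwImmU64(uint64_t uSrc, uint8_t bShift)
{
    if (bShift > 15)
        return 0;                                   /* oversized counts zero the lanes */
    uint64_t fLaneMask = (UINT64_C(0xffff) >> bShift) * UINT64_C(0x0001000100010001);
    return (uSrc >> bShift) & fLaneMask;            /* drop bits shifted in from the lane above */
}
#endif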
4932
4933
4934/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
4935FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
4936{
4937// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4938 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
4939}
4940
4941
4942/** Opcode 0x66 0x0f 0x71 11/2. */
4943FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
4944{
4945// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4946 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
4947}
4948
4949
4950/** Opcode 0x0f 0x71 11/4. */
4951FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
4952{
4953// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4954 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
4955}
4956
4957
4958/** Opcode 0x66 0x0f 0x71 11/4. */
4959FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
4960{
4961// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4962 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
4963}
4964
4965
4966/** Opcode 0x0f 0x71 11/6. */
4967FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
4968{
4969// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4970 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
4971}
4972
4973
4974/** Opcode 0x66 0x0f 0x71 11/6. */
4975FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
4976{
4977// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4978 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
4979}
4980
4981
4982/**
4983 * Group 12 jump table for register variant.
4984 */
4985IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4986{
4987 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4988 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4989 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4990 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4991 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4992 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4993 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4994 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4995};
4996AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4997
4998
4999/** Opcode 0x0f 0x71. */
5000FNIEMOP_DEF(iemOp_Grp12)
5001{
5002 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5003 if (IEM_IS_MODRM_REG_MODE(bRm))
5004 /* register, register */
5005 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5006 + pVCpu->iem.s.idxPrefix], bRm);
5007 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
5008}
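/*
 * How the group dispatch above indexes its table: four entries per ModR/M
 * reg value, one per mandatory-prefix column in the order the table rows
 * lay them out (none, 0x66, 0xf3, 0xf2).  A minimal sketch with
 * hypothetical names:
 */
#if 0 /* illustrative sketch only */
# include <stdint.h>
static unsigned sketchGroupTableIndex(uint8_t bRm, unsigned idxPrefix)
{
    unsigned iReg = (bRm >> 3) & 7;     /* ModR/M reg field picks the /0../7 row */
    return iReg * 4 + idxPrefix;        /* column 0..3 picks the prefix variant */
}
#endif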
5009
5010
5011/** Opcode 0x0f 0x72 11/2. */
5012FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
5013{
5014// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5015 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
5016}
5017
5018
5019/** Opcode 0x66 0x0f 0x72 11/2. */
5020FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
5021{
5022// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5023 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
5024}
5025
5026
5027/** Opcode 0x0f 0x72 11/4. */
5028FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
5029{
5030// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5031 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
5032}
5033
5034
5035/** Opcode 0x66 0x0f 0x72 11/4. */
5036FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
5037{
5038// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5039 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
5040}
5041
5042
5043/** Opcode 0x0f 0x72 11/6. */
5044FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
5045{
5046// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5047 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
5048}
5049
5050/** Opcode 0x66 0x0f 0x72 11/6. */
5051FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
5052{
5053// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5054 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
5055}
5056
5057
5058/**
5059 * Group 13 jump table for register variant.
5060 */
5061IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
5062{
5063 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5064 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5065 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5066 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5067 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5068 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5069 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5070 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
5071};
5072AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
5073
5074/** Opcode 0x0f 0x72. */
5075FNIEMOP_DEF(iemOp_Grp13)
5076{
5077 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5078 if (IEM_IS_MODRM_REG_MODE(bRm))
5079 /* register, register */
5080 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5081 + pVCpu->iem.s.idxPrefix], bRm);
5082 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
5083}
5084
5085
5086/** Opcode 0x0f 0x73 11/2. */
5087FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
5088{
5089// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5090 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
5091}
5092
5093
5094/** Opcode 0x66 0x0f 0x73 11/2. */
5095FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
5096{
5097// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5098 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
5099}
5100
5101
5102/** Opcode 0x66 0x0f 0x73 11/3. */
5103FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
5104{
5105// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5106 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
5107}
5108
5109
5110/** Opcode 0x0f 0x73 11/6. */
5111FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
5112{
5113// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5114 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
5115}
5116
5117
5118/** Opcode 0x66 0x0f 0x73 11/6. */
5119FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
5120{
5121// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5122 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
5123}
5124
5125
5126/** Opcode 0x66 0x0f 0x73 11/7. */
5127FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
5128{
5129// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5130 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
5131}
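/*
 * Unlike the word/dword/qword shifts, psrldq/pslldq shift the whole 128-bit
 * value by imm8 whole bytes.  A minimal standalone sketch for psrldq with
 * the register modelled as 16 little-endian bytes; hypothetical name.
 */
#if 0 /* illustrative sketch only */
# include <stdint.h>
static void sketchPsrldqImmU128(uint8_t abDst[16], uint8_t const abSrc[16], uint8_t cbShift)
{
    for (unsigned i = 0; i < 16; i++)
        abDst[i] = i + cbShift < 16 ? abSrc[i + cbShift] : 0;   /* zero fill from the top */
}
#endif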
5132
5133/**
5134 * Group 14 jump table for register variant.
5135 */
5136IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
5137{
5138 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5139 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5140 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5141 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5142 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5143 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5144 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5145 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5146};
5147AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
5148
5149
5150/** Opcode 0x0f 0x73. */
5151FNIEMOP_DEF(iemOp_Grp14)
5152{
5153 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5154 if (IEM_IS_MODRM_REG_MODE(bRm))
5155 /* register, register */
5156 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5157 + pVCpu->iem.s.idxPrefix], bRm);
5158 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
5159}
5160
5161
5162/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
5163FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
5164{
5165 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5166 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
5167}
5168
5169
5170/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
5171FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
5172{
5173 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5174 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
5175}
5176
5177
5178/* Opcode 0xf3 0x0f 0x74 - invalid */
5179/* Opcode 0xf2 0x0f 0x74 - invalid */
5180
5181
5182/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
5183FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
5184{
5185 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5186 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
5187}
5188
5189
5190/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
5191FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
5192{
5193 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5194 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
5195}
5196
5197
5198/* Opcode 0xf3 0x0f 0x75 - invalid */
5199/* Opcode 0xf2 0x0f 0x75 - invalid */
5200
5201
5202/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
5203FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
5204{
5205 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5206 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
5207}
5208
5209
5210/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
5211FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
5212{
5213 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5214 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
5215}
5216
5217
5218/* Opcode 0xf3 0x0f 0x76 - invalid */
5219/* Opcode 0xf2 0x0f 0x76 - invalid */
5220
5221
5222/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
5223FNIEMOP_DEF(iemOp_emms)
5224{
5225 IEMOP_MNEMONIC(emms, "emms");
5226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5227
5228 IEM_MC_BEGIN(0, 0);
5229 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
5230 IEM_MC_MAYBE_RAISE_FPU_XCPT();
5231 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5232 IEM_MC_FPU_FROM_MMX_MODE();
5233 IEM_MC_ADVANCE_RIP();
5234 IEM_MC_END();
5235 return VINF_SUCCESS;
5236}
5237
5238/* Opcode 0x66 0x0f 0x77 - invalid */
5239/* Opcode 0xf3 0x0f 0x77 - invalid */
5240/* Opcode 0xf2 0x0f 0x77 - invalid */
5241
5242/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
5243#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5244FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
5245{
5246 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
5247 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
5248 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
5249 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
5250
5251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5252 if (IEM_IS_MODRM_REG_MODE(bRm))
5253 {
5254 /*
5255 * Register, register.
5256 */
5257 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5258 if (enmEffOpSize == IEMMODE_64BIT)
5259 {
5260 IEM_MC_BEGIN(2, 0);
5261 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5262 IEM_MC_ARG(uint64_t, u64Enc, 1);
5263 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5264 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
5265 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
5266 IEM_MC_END();
5267 }
5268 else
5269 {
5270 IEM_MC_BEGIN(2, 0);
5271 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5272 IEM_MC_ARG(uint32_t, u32Enc, 1);
5273 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5274 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
5275 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
5276 IEM_MC_END();
5277 }
5278 }
5279 else
5280 {
5281 /*
5282 * Memory, register.
5283 */
5284 if (enmEffOpSize == IEMMODE_64BIT)
5285 {
5286 IEM_MC_BEGIN(3, 0);
5287 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5288 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5289 IEM_MC_ARG(uint64_t, u64Enc, 2);
5290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5291 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5292 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5293 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5294 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
5295 IEM_MC_END();
5296 }
5297 else
5298 {
5299 IEM_MC_BEGIN(3, 0);
5300 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5301 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5302 IEM_MC_ARG(uint32_t, u32Enc, 2);
5303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5304 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5305 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5306 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5307 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
5308 IEM_MC_END();
5309 }
5310 }
5311 return VINF_SUCCESS;
5312}
5313#else
5314FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
5315#endif
5316
5317/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
5318FNIEMOP_STUB(iemOp_AmdGrp17);
5319/* Opcode 0xf3 0x0f 0x78 - invalid */
5320/* Opcode 0xf2 0x0f 0x78 - invalid */
5321
5322/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
5323#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5324FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
5325{
5326 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
5327 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
5328 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
5329 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
5330
5331 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5332 if (IEM_IS_MODRM_REG_MODE(bRm))
5333 {
5334 /*
5335 * Register, register.
5336 */
5337 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5338 if (enmEffOpSize == IEMMODE_64BIT)
5339 {
5340 IEM_MC_BEGIN(2, 0);
5341 IEM_MC_ARG(uint64_t, u64Val, 0);
5342 IEM_MC_ARG(uint64_t, u64Enc, 1);
5343 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
5344 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5345 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
5346 IEM_MC_END();
5347 }
5348 else
5349 {
5350 IEM_MC_BEGIN(2, 0);
5351 IEM_MC_ARG(uint32_t, u32Val, 0);
5352 IEM_MC_ARG(uint32_t, u32Enc, 1);
5353 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
5354 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5355 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
5356 IEM_MC_END();
5357 }
5358 }
5359 else
5360 {
5361 /*
5362 * Register, memory.
5363 */
5364 if (enmEffOpSize == IEMMODE_64BIT)
5365 {
5366 IEM_MC_BEGIN(3, 0);
5367 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5368 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5369 IEM_MC_ARG(uint64_t, u64Enc, 2);
5370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5371 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5372 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5373 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5374 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
5375 IEM_MC_END();
5376 }
5377 else
5378 {
5379 IEM_MC_BEGIN(3, 0);
5380 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5381 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5382 IEM_MC_ARG(uint32_t, u32Enc, 2);
5383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5384 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5385 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5386 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5387 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
5388 IEM_MC_END();
5389 }
5390 }
5391 return VINF_SUCCESS;
5392}
5393#else
5394FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
5395#endif
5396/* Opcode 0x66 0x0f 0x79 - invalid */
5397/* Opcode 0xf3 0x0f 0x79 - invalid */
5398/* Opcode 0xf2 0x0f 0x79 - invalid */
5399
5400/* Opcode 0x0f 0x7a - invalid */
5401/* Opcode 0x66 0x0f 0x7a - invalid */
5402/* Opcode 0xf3 0x0f 0x7a - invalid */
5403/* Opcode 0xf2 0x0f 0x7a - invalid */
5404
5405/* Opcode 0x0f 0x7b - invalid */
5406/* Opcode 0x66 0x0f 0x7b - invalid */
5407/* Opcode 0xf3 0x0f 0x7b - invalid */
5408/* Opcode 0xf2 0x0f 0x7b - invalid */
5409
5410/* Opcode 0x0f 0x7c - invalid */
5411/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
5412FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
5413/* Opcode 0xf3 0x0f 0x7c - invalid */
5414/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
5415FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
5416
5417/* Opcode 0x0f 0x7d - invalid */
5418/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
5419FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
5420/* Opcode 0xf3 0x0f 0x7d - invalid */
5421/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
5422FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
5423
5424
5425/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
5426FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
5427{
5428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5429 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5430 {
5431 /**
5432 * @opcode 0x7e
5433 * @opcodesub rex.w=1
5434 * @oppfx none
5435 * @opcpuid mmx
5436 * @opgroup og_mmx_datamove
5437 * @opxcpttype 5
5438 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
5439 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
5440 */
5441 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5442 if (IEM_IS_MODRM_REG_MODE(bRm))
5443 {
5444 /* greg64, MMX */
5445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5446 IEM_MC_BEGIN(0, 1);
5447 IEM_MC_LOCAL(uint64_t, u64Tmp);
5448
5449 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5450 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5451
5452 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5453 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5454 IEM_MC_FPU_TO_MMX_MODE();
5455
5456 IEM_MC_ADVANCE_RIP();
5457 IEM_MC_END();
5458 }
5459 else
5460 {
5461 /* [mem64], MMX */
5462 IEM_MC_BEGIN(0, 2);
5463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5464 IEM_MC_LOCAL(uint64_t, u64Tmp);
5465
5466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5468 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5469 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5470
5471 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5472 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5473 IEM_MC_FPU_TO_MMX_MODE();
5474
5475 IEM_MC_ADVANCE_RIP();
5476 IEM_MC_END();
5477 }
5478 }
5479 else
5480 {
5481 /**
5482 * @opdone
5483 * @opcode 0x7e
5484 * @opcodesub rex.w=0
5485 * @oppfx none
5486 * @opcpuid mmx
5487 * @opgroup og_mmx_datamove
5488 * @opxcpttype 5
5489 * @opfunction iemOp_movd_q_Ey_Pd
5490 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5491 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5492 */
5493 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5494 if (IEM_IS_MODRM_REG_MODE(bRm))
5495 {
5496 /* greg32, MMX */
5497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5498 IEM_MC_BEGIN(0, 1);
5499 IEM_MC_LOCAL(uint32_t, u32Tmp);
5500
5501 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5502 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5503
5504 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
5505 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5506 IEM_MC_FPU_TO_MMX_MODE();
5507
5508 IEM_MC_ADVANCE_RIP();
5509 IEM_MC_END();
5510 }
5511 else
5512 {
5513 /* [mem32], MMX */
5514 IEM_MC_BEGIN(0, 2);
5515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5516 IEM_MC_LOCAL(uint32_t, u32Tmp);
5517
5518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5520 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5521 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5522
5523 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
5524 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5525 IEM_MC_FPU_TO_MMX_MODE();
5526
5527 IEM_MC_ADVANCE_RIP();
5528 IEM_MC_END();
5529 }
5530 }
5531 return VINF_SUCCESS;
5532
5533}
5534
5535
5536FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
5537{
5538 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5539 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5540 {
5541 /**
5542 * @opcode 0x7e
5543 * @opcodesub rex.w=1
5544 * @oppfx 0x66
5545 * @opcpuid sse2
5546 * @opgroup og_sse2_simdint_datamove
5547 * @opxcpttype 5
5548 * @optest 64-bit / op1=1 op2=2 -> op1=2
5549 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
5550 */
5551 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5552 if (IEM_IS_MODRM_REG_MODE(bRm))
5553 {
5554 /* greg64, XMM */
5555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5556 IEM_MC_BEGIN(0, 1);
5557 IEM_MC_LOCAL(uint64_t, u64Tmp);
5558
5559 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5560 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5561
5562 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5563 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5564
5565 IEM_MC_ADVANCE_RIP();
5566 IEM_MC_END();
5567 }
5568 else
5569 {
5570 /* [mem64], XMM */
5571 IEM_MC_BEGIN(0, 2);
5572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5573 IEM_MC_LOCAL(uint64_t, u64Tmp);
5574
5575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5577 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5578 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5579
5580 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5581 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5582
5583 IEM_MC_ADVANCE_RIP();
5584 IEM_MC_END();
5585 }
5586 }
5587 else
5588 {
5589 /**
5590 * @opdone
5591 * @opcode 0x7e
5592 * @opcodesub rex.w=0
5593 * @oppfx 0x66
5594 * @opcpuid sse2
5595 * @opgroup og_sse2_simdint_datamove
5596 * @opxcpttype 5
5597 * @opfunction iemOp_movd_q_Ey_Vy
5598 * @optest op1=1 op2=2 -> op1=2
5599 * @optest op1=0 op2=-42 -> op1=-42
5600 */
5601 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5602 if (IEM_IS_MODRM_REG_MODE(bRm))
5603 {
5604 /* greg32, XMM */
5605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5606 IEM_MC_BEGIN(0, 1);
5607 IEM_MC_LOCAL(uint32_t, u32Tmp);
5608
5609 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5610 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5611
5612 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5613 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5614
5615 IEM_MC_ADVANCE_RIP();
5616 IEM_MC_END();
5617 }
5618 else
5619 {
5620 /* [mem32], XMM */
5621 IEM_MC_BEGIN(0, 2);
5622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5623 IEM_MC_LOCAL(uint32_t, u32Tmp);
5624
5625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5627 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5628 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5629
5630 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5631 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5632
5633 IEM_MC_ADVANCE_RIP();
5634 IEM_MC_END();
5635 }
5636 }
5637 return VINF_SUCCESS;
5638
5639}
5640
5641/**
5642 * @opcode 0x7e
5643 * @oppfx 0xf3
5644 * @opcpuid sse2
5645 * @opgroup og_sse2_pcksclr_datamove
5646 * @opxcpttype none
5647 * @optest op1=1 op2=2 -> op1=2
5648 * @optest op1=0 op2=-42 -> op1=-42
5649 */
5650FNIEMOP_DEF(iemOp_movq_Vq_Wq)
5651{
5652 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5654 if (IEM_IS_MODRM_REG_MODE(bRm))
5655 {
5656 /*
5657 * Register, register.
5658 */
5659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5660 IEM_MC_BEGIN(0, 2);
5661 IEM_MC_LOCAL(uint64_t, uSrc);
5662
5663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5664 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5665
5666 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5667 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
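 /* movq (f3 0f 7e) zero-extends the source qword into the full 128-bit
    destination register, hence the _ZX_U128 store. */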
5668
5669 IEM_MC_ADVANCE_RIP();
5670 IEM_MC_END();
5671 }
5672 else
5673 {
5674 /*
5675 * Memory, register.
5676 */
5677 IEM_MC_BEGIN(0, 2);
5678 IEM_MC_LOCAL(uint64_t, uSrc);
5679 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5680
5681 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5683 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5684 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5685
5686 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5687 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5688
5689 IEM_MC_ADVANCE_RIP();
5690 IEM_MC_END();
5691 }
5692 return VINF_SUCCESS;
5693}
5694
5695/* Opcode 0xf2 0x0f 0x7e - invalid */
5696
5697
5698/** Opcode 0x0f 0x7f - movq Qq, Pq */
5699FNIEMOP_DEF(iemOp_movq_Qq_Pq)
5700{
5701 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
5702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5703 if (IEM_IS_MODRM_REG_MODE(bRm))
5704 {
5705 /*
5706 * Register, register.
5707 */
5708 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
5709 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
5710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5711 IEM_MC_BEGIN(0, 1);
5712 IEM_MC_LOCAL(uint64_t, u64Tmp);
5713 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5714 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5715 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5716 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
5717 IEM_MC_FPU_TO_MMX_MODE();
5718 IEM_MC_ADVANCE_RIP();
5719 IEM_MC_END();
5720 }
5721 else
5722 {
5723 /*
5724 * Memory, register.
5725 */
5726 IEM_MC_BEGIN(0, 2);
5727 IEM_MC_LOCAL(uint64_t, u64Tmp);
5728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5729
5730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5732 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5733 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5734
5735 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5736 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5737 IEM_MC_FPU_TO_MMX_MODE();
5738
5739 IEM_MC_ADVANCE_RIP();
5740 IEM_MC_END();
5741 }
5742 return VINF_SUCCESS;
5743}
5744
5745/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
5746FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
5747{
5748 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5749 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5750 if (IEM_IS_MODRM_REG_MODE(bRm))
5751 {
5752 /*
5753 * Register, register.
5754 */
5755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5756 IEM_MC_BEGIN(0, 0);
5757 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5758 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5759 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
5760 IEM_GET_MODRM_REG(pVCpu, bRm));
5761 IEM_MC_ADVANCE_RIP();
5762 IEM_MC_END();
5763 }
5764 else
5765 {
5766 /*
5767 * Register, memory.
5768 */
5769 IEM_MC_BEGIN(0, 2);
5770 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5772
5773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5775 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5776 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5777
5778 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5779 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
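 /* The _ALIGN_SSE store enforces movdqa's 16-byte alignment requirement
    (#GP(0) on a misaligned address); movdqu below uses the plain
    unaligned store instead. */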
5780
5781 IEM_MC_ADVANCE_RIP();
5782 IEM_MC_END();
5783 }
5784 return VINF_SUCCESS;
5785}
5786
5787/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
5788FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
5789{
5790 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5791 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5792 if (IEM_IS_MODRM_REG_MODE(bRm))
5793 {
5794 /*
5795 * Register, register.
5796 */
5797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5798 IEM_MC_BEGIN(0, 0);
5799 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5800 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5801 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
5802 IEM_GET_MODRM_REG(pVCpu, bRm));
5803 IEM_MC_ADVANCE_RIP();
5804 IEM_MC_END();
5805 }
5806 else
5807 {
5808 /*
5809 * Register, memory.
5810 */
5811 IEM_MC_BEGIN(0, 2);
5812 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5814
5815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5817 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5818 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5819
5820 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5821 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5822
5823 IEM_MC_ADVANCE_RIP();
5824 IEM_MC_END();
5825 }
5826 return VINF_SUCCESS;
5827}
5828
5829/* Opcode 0xf2 0x0f 0x7f - invalid */
5830
5831
5832
5833/** Opcode 0x0f 0x80. */
5834FNIEMOP_DEF(iemOp_jo_Jv)
5835{
5836 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
5837 IEMOP_HLP_MIN_386();
5838 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
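 /* Jv defaults to 64-bit operand size in long mode (see the helper above),
    so the else branch covers both the 32-bit and 64-bit cases; the
    displacement itself stays 32 bits and is sign-extended when added to
    RIP. */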
5839 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5840 {
5841 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5843
5844 IEM_MC_BEGIN(0, 0);
5845 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5846 IEM_MC_REL_JMP_S16(i16Imm);
5847 } IEM_MC_ELSE() {
5848 IEM_MC_ADVANCE_RIP();
5849 } IEM_MC_ENDIF();
5850 IEM_MC_END();
5851 }
5852 else
5853 {
5854 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5856
5857 IEM_MC_BEGIN(0, 0);
5858 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5859 IEM_MC_REL_JMP_S32(i32Imm);
5860 } IEM_MC_ELSE() {
5861 IEM_MC_ADVANCE_RIP();
5862 } IEM_MC_ENDIF();
5863 IEM_MC_END();
5864 }
5865 return VINF_SUCCESS;
5866}
5867
5868
5869/** Opcode 0x0f 0x81. */
5870FNIEMOP_DEF(iemOp_jno_Jv)
5871{
5872 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
5873 IEMOP_HLP_MIN_386();
5874 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5875 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5876 {
5877 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5879
5880 IEM_MC_BEGIN(0, 0);
5881 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5882 IEM_MC_ADVANCE_RIP();
5883 } IEM_MC_ELSE() {
5884 IEM_MC_REL_JMP_S16(i16Imm);
5885 } IEM_MC_ENDIF();
5886 IEM_MC_END();
5887 }
5888 else
5889 {
5890 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5892
5893 IEM_MC_BEGIN(0, 0);
5894 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
5895 IEM_MC_ADVANCE_RIP();
5896 } IEM_MC_ELSE() {
5897 IEM_MC_REL_JMP_S32(i32Imm);
5898 } IEM_MC_ENDIF();
5899 IEM_MC_END();
5900 }
5901 return VINF_SUCCESS;
5902}
5903
5904
5905/** Opcode 0x0f 0x82. */
5906FNIEMOP_DEF(iemOp_jc_Jv)
5907{
5908 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
5909 IEMOP_HLP_MIN_386();
5910 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5911 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5912 {
5913 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5915
5916 IEM_MC_BEGIN(0, 0);
5917 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5918 IEM_MC_REL_JMP_S16(i16Imm);
5919 } IEM_MC_ELSE() {
5920 IEM_MC_ADVANCE_RIP();
5921 } IEM_MC_ENDIF();
5922 IEM_MC_END();
5923 }
5924 else
5925 {
5926 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5928
5929 IEM_MC_BEGIN(0, 0);
5930 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5931 IEM_MC_REL_JMP_S32(i32Imm);
5932 } IEM_MC_ELSE() {
5933 IEM_MC_ADVANCE_RIP();
5934 } IEM_MC_ENDIF();
5935 IEM_MC_END();
5936 }
5937 return VINF_SUCCESS;
5938}
5939
5940
5941/** Opcode 0x0f 0x83. */
5942FNIEMOP_DEF(iemOp_jnc_Jv)
5943{
5944 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
5945 IEMOP_HLP_MIN_386();
5946 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5947 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5948 {
5949 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5951
5952 IEM_MC_BEGIN(0, 0);
5953 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5954 IEM_MC_ADVANCE_RIP();
5955 } IEM_MC_ELSE() {
5956 IEM_MC_REL_JMP_S16(i16Imm);
5957 } IEM_MC_ENDIF();
5958 IEM_MC_END();
5959 }
5960 else
5961 {
5962 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5964
5965 IEM_MC_BEGIN(0, 0);
5966 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
5967 IEM_MC_ADVANCE_RIP();
5968 } IEM_MC_ELSE() {
5969 IEM_MC_REL_JMP_S32(i32Imm);
5970 } IEM_MC_ENDIF();
5971 IEM_MC_END();
5972 }
5973 return VINF_SUCCESS;
5974}
5975
5976
5977/** Opcode 0x0f 0x84. */
5978FNIEMOP_DEF(iemOp_je_Jv)
5979{
5980 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
5981 IEMOP_HLP_MIN_386();
5982 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5983 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
5984 {
5985 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
5986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5987
5988 IEM_MC_BEGIN(0, 0);
5989 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
5990 IEM_MC_REL_JMP_S16(i16Imm);
5991 } IEM_MC_ELSE() {
5992 IEM_MC_ADVANCE_RIP();
5993 } IEM_MC_ENDIF();
5994 IEM_MC_END();
5995 }
5996 else
5997 {
5998 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
5999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6000
6001 IEM_MC_BEGIN(0, 0);
6002 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6003 IEM_MC_REL_JMP_S32(i32Imm);
6004 } IEM_MC_ELSE() {
6005 IEM_MC_ADVANCE_RIP();
6006 } IEM_MC_ENDIF();
6007 IEM_MC_END();
6008 }
6009 return VINF_SUCCESS;
6010}
6011
6012
6013/** Opcode 0x0f 0x85. */
6014FNIEMOP_DEF(iemOp_jne_Jv)
6015{
6016 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
6017 IEMOP_HLP_MIN_386();
6018 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6019 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6020 {
6021 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6023
6024 IEM_MC_BEGIN(0, 0);
6025 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6026 IEM_MC_ADVANCE_RIP();
6027 } IEM_MC_ELSE() {
6028 IEM_MC_REL_JMP_S16(i16Imm);
6029 } IEM_MC_ENDIF();
6030 IEM_MC_END();
6031 }
6032 else
6033 {
6034 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6036
6037 IEM_MC_BEGIN(0, 0);
6038 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6039 IEM_MC_ADVANCE_RIP();
6040 } IEM_MC_ELSE() {
6041 IEM_MC_REL_JMP_S32(i32Imm);
6042 } IEM_MC_ENDIF();
6043 IEM_MC_END();
6044 }
6045 return VINF_SUCCESS;
6046}
6047
6048
6049/** Opcode 0x0f 0x86. */
6050FNIEMOP_DEF(iemOp_jbe_Jv)
6051{
6052 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
6053 IEMOP_HLP_MIN_386();
6054 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6055 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6056 {
6057 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6059
6060 IEM_MC_BEGIN(0, 0);
6061 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6062 IEM_MC_REL_JMP_S16(i16Imm);
6063 } IEM_MC_ELSE() {
6064 IEM_MC_ADVANCE_RIP();
6065 } IEM_MC_ENDIF();
6066 IEM_MC_END();
6067 }
6068 else
6069 {
6070 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6072
6073 IEM_MC_BEGIN(0, 0);
6074 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6075 IEM_MC_REL_JMP_S32(i32Imm);
6076 } IEM_MC_ELSE() {
6077 IEM_MC_ADVANCE_RIP();
6078 } IEM_MC_ENDIF();
6079 IEM_MC_END();
6080 }
6081 return VINF_SUCCESS;
6082}
6083
6084
6085/** Opcode 0x0f 0x87. */
6086FNIEMOP_DEF(iemOp_jnbe_Jv)
6087{
6088 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
6089 IEMOP_HLP_MIN_386();
6090 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6091 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6092 {
6093 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6095
6096 IEM_MC_BEGIN(0, 0);
6097 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6098 IEM_MC_ADVANCE_RIP();
6099 } IEM_MC_ELSE() {
6100 IEM_MC_REL_JMP_S16(i16Imm);
6101 } IEM_MC_ENDIF();
6102 IEM_MC_END();
6103 }
6104 else
6105 {
6106 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6108
6109 IEM_MC_BEGIN(0, 0);
6110 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6111 IEM_MC_ADVANCE_RIP();
6112 } IEM_MC_ELSE() {
6113 IEM_MC_REL_JMP_S32(i32Imm);
6114 } IEM_MC_ENDIF();
6115 IEM_MC_END();
6116 }
6117 return VINF_SUCCESS;
6118}
6119
6120
6121/** Opcode 0x0f 0x88. */
6122FNIEMOP_DEF(iemOp_js_Jv)
6123{
6124 IEMOP_MNEMONIC(js_Jv, "js Jv");
6125 IEMOP_HLP_MIN_386();
6126 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6127 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6128 {
6129 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6131
6132 IEM_MC_BEGIN(0, 0);
6133 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6134 IEM_MC_REL_JMP_S16(i16Imm);
6135 } IEM_MC_ELSE() {
6136 IEM_MC_ADVANCE_RIP();
6137 } IEM_MC_ENDIF();
6138 IEM_MC_END();
6139 }
6140 else
6141 {
6142 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6144
6145 IEM_MC_BEGIN(0, 0);
6146 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6147 IEM_MC_REL_JMP_S32(i32Imm);
6148 } IEM_MC_ELSE() {
6149 IEM_MC_ADVANCE_RIP();
6150 } IEM_MC_ENDIF();
6151 IEM_MC_END();
6152 }
6153 return VINF_SUCCESS;
6154}
6155
6156
6157/** Opcode 0x0f 0x89. */
6158FNIEMOP_DEF(iemOp_jns_Jv)
6159{
6160 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
6161 IEMOP_HLP_MIN_386();
6162 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6163 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6164 {
6165 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6167
6168 IEM_MC_BEGIN(0, 0);
6169 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6170 IEM_MC_ADVANCE_RIP();
6171 } IEM_MC_ELSE() {
6172 IEM_MC_REL_JMP_S16(i16Imm);
6173 } IEM_MC_ENDIF();
6174 IEM_MC_END();
6175 }
6176 else
6177 {
6178 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6180
6181 IEM_MC_BEGIN(0, 0);
6182 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6183 IEM_MC_ADVANCE_RIP();
6184 } IEM_MC_ELSE() {
6185 IEM_MC_REL_JMP_S32(i32Imm);
6186 } IEM_MC_ENDIF();
6187 IEM_MC_END();
6188 }
6189 return VINF_SUCCESS;
6190}
6191
6192
6193/** Opcode 0x0f 0x8a. */
6194FNIEMOP_DEF(iemOp_jp_Jv)
6195{
6196 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
6197 IEMOP_HLP_MIN_386();
6198 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6199 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6200 {
6201 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6203
6204 IEM_MC_BEGIN(0, 0);
6205 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6206 IEM_MC_REL_JMP_S16(i16Imm);
6207 } IEM_MC_ELSE() {
6208 IEM_MC_ADVANCE_RIP();
6209 } IEM_MC_ENDIF();
6210 IEM_MC_END();
6211 }
6212 else
6213 {
6214 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6216
6217 IEM_MC_BEGIN(0, 0);
6218 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6219 IEM_MC_REL_JMP_S32(i32Imm);
6220 } IEM_MC_ELSE() {
6221 IEM_MC_ADVANCE_RIP();
6222 } IEM_MC_ENDIF();
6223 IEM_MC_END();
6224 }
6225 return VINF_SUCCESS;
6226}
6227
6228
6229/** Opcode 0x0f 0x8b. */
6230FNIEMOP_DEF(iemOp_jnp_Jv)
6231{
6232 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
6233 IEMOP_HLP_MIN_386();
6234 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6235 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6236 {
6237 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6239
6240 IEM_MC_BEGIN(0, 0);
6241 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6242 IEM_MC_ADVANCE_RIP();
6243 } IEM_MC_ELSE() {
6244 IEM_MC_REL_JMP_S16(i16Imm);
6245 } IEM_MC_ENDIF();
6246 IEM_MC_END();
6247 }
6248 else
6249 {
6250 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6252
6253 IEM_MC_BEGIN(0, 0);
6254 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6255 IEM_MC_ADVANCE_RIP();
6256 } IEM_MC_ELSE() {
6257 IEM_MC_REL_JMP_S32(i32Imm);
6258 } IEM_MC_ENDIF();
6259 IEM_MC_END();
6260 }
6261 return VINF_SUCCESS;
6262}
6263
6264
6265/** Opcode 0x0f 0x8c. */
6266FNIEMOP_DEF(iemOp_jl_Jv)
6267{
6268 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
6269 IEMOP_HLP_MIN_386();
6270 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6271 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6272 {
6273 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6275
6276 IEM_MC_BEGIN(0, 0);
6277 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6278 IEM_MC_REL_JMP_S16(i16Imm);
6279 } IEM_MC_ELSE() {
6280 IEM_MC_ADVANCE_RIP();
6281 } IEM_MC_ENDIF();
6282 IEM_MC_END();
6283 }
6284 else
6285 {
6286 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6288
6289 IEM_MC_BEGIN(0, 0);
6290 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6291 IEM_MC_REL_JMP_S32(i32Imm);
6292 } IEM_MC_ELSE() {
6293 IEM_MC_ADVANCE_RIP();
6294 } IEM_MC_ENDIF();
6295 IEM_MC_END();
6296 }
6297 return VINF_SUCCESS;
6298}
6299
6300
6301/** Opcode 0x0f 0x8d. */
6302FNIEMOP_DEF(iemOp_jnl_Jv)
6303{
6304 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
6305 IEMOP_HLP_MIN_386();
6306 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6307 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6308 {
6309 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6311
6312 IEM_MC_BEGIN(0, 0);
6313 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6314 IEM_MC_ADVANCE_RIP();
6315 } IEM_MC_ELSE() {
6316 IEM_MC_REL_JMP_S16(i16Imm);
6317 } IEM_MC_ENDIF();
6318 IEM_MC_END();
6319 }
6320 else
6321 {
6322 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6324
6325 IEM_MC_BEGIN(0, 0);
6326 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6327 IEM_MC_ADVANCE_RIP();
6328 } IEM_MC_ELSE() {
6329 IEM_MC_REL_JMP_S32(i32Imm);
6330 } IEM_MC_ENDIF();
6331 IEM_MC_END();
6332 }
6333 return VINF_SUCCESS;
6334}
6335
6336
6337/** Opcode 0x0f 0x8e. */
6338FNIEMOP_DEF(iemOp_jle_Jv)
6339{
6340 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
6341 IEMOP_HLP_MIN_386();
6342 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6343 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6344 {
6345 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6347
6348 IEM_MC_BEGIN(0, 0);
6349 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6350 IEM_MC_REL_JMP_S16(i16Imm);
6351 } IEM_MC_ELSE() {
6352 IEM_MC_ADVANCE_RIP();
6353 } IEM_MC_ENDIF();
6354 IEM_MC_END();
6355 }
6356 else
6357 {
6358 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6360
6361 IEM_MC_BEGIN(0, 0);
6362 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6363 IEM_MC_REL_JMP_S32(i32Imm);
6364 } IEM_MC_ELSE() {
6365 IEM_MC_ADVANCE_RIP();
6366 } IEM_MC_ENDIF();
6367 IEM_MC_END();
6368 }
6369 return VINF_SUCCESS;
6370}
6371
6372
6373/** Opcode 0x0f 0x8f. */
6374FNIEMOP_DEF(iemOp_jnle_Jv)
6375{
6376 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
6377 IEMOP_HLP_MIN_386();
6378 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6379 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6380 {
6381 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6383
6384 IEM_MC_BEGIN(0, 0);
6385 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6386 IEM_MC_ADVANCE_RIP();
6387 } IEM_MC_ELSE() {
6388 IEM_MC_REL_JMP_S16(i16Imm);
6389 } IEM_MC_ENDIF();
6390 IEM_MC_END();
6391 }
6392 else
6393 {
6394 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6396
6397 IEM_MC_BEGIN(0, 0);
6398 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6399 IEM_MC_ADVANCE_RIP();
6400 } IEM_MC_ELSE() {
6401 IEM_MC_REL_JMP_S32(i32Imm);
6402 } IEM_MC_ENDIF();
6403 IEM_MC_END();
6404 }
6405 return VINF_SUCCESS;
6406}
6407
6408
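/* Opcodes 0x0f 0x90 thru 0x9f: the sixteen setcc workers below are all
   identical in structure; only the EFLAGS predicate macro differs, with
   the 1/0 store arms swapped for the negated conditions. */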
6409/** Opcode 0x0f 0x90. */
6410FNIEMOP_DEF(iemOp_seto_Eb)
6411{
6412 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
6413 IEMOP_HLP_MIN_386();
6414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6415
6416 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6417 * any way. AMD says it's "unused", whatever that means. We're
6418 * ignoring it for now. */
6419 if (IEM_IS_MODRM_REG_MODE(bRm))
6420 {
6421 /* register target */
6422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6423 IEM_MC_BEGIN(0, 0);
6424 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6425 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6426 } IEM_MC_ELSE() {
6427 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6428 } IEM_MC_ENDIF();
6429 IEM_MC_ADVANCE_RIP();
6430 IEM_MC_END();
6431 }
6432 else
6433 {
6434 /* memory target */
6435 IEM_MC_BEGIN(0, 1);
6436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6439 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6440 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6441 } IEM_MC_ELSE() {
6442 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6443 } IEM_MC_ENDIF();
6444 IEM_MC_ADVANCE_RIP();
6445 IEM_MC_END();
6446 }
6447 return VINF_SUCCESS;
6448}
6449
6450
6451/** Opcode 0x0f 0x91. */
6452FNIEMOP_DEF(iemOp_setno_Eb)
6453{
6454 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
6455 IEMOP_HLP_MIN_386();
6456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6457
6458 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6459 * any way. AMD says it's "unused", whatever that means. We're
6460 * ignoring it for now. */
6461 if (IEM_IS_MODRM_REG_MODE(bRm))
6462 {
6463 /* register target */
6464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6465 IEM_MC_BEGIN(0, 0);
6466 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6467 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6468 } IEM_MC_ELSE() {
6469 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6470 } IEM_MC_ENDIF();
6471 IEM_MC_ADVANCE_RIP();
6472 IEM_MC_END();
6473 }
6474 else
6475 {
6476 /* memory target */
6477 IEM_MC_BEGIN(0, 1);
6478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6481 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6482 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6483 } IEM_MC_ELSE() {
6484 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6485 } IEM_MC_ENDIF();
6486 IEM_MC_ADVANCE_RIP();
6487 IEM_MC_END();
6488 }
6489 return VINF_SUCCESS;
6490}
6491
6492
6493/** Opcode 0x0f 0x92. */
6494FNIEMOP_DEF(iemOp_setc_Eb)
6495{
6496 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
6497 IEMOP_HLP_MIN_386();
6498 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6499
6500 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6501 * any way. AMD says it's "unused", whatever that means. We're
6502 * ignoring it for now. */
6503 if (IEM_IS_MODRM_REG_MODE(bRm))
6504 {
6505 /* register target */
6506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6507 IEM_MC_BEGIN(0, 0);
6508 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6509 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6510 } IEM_MC_ELSE() {
6511 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6512 } IEM_MC_ENDIF();
6513 IEM_MC_ADVANCE_RIP();
6514 IEM_MC_END();
6515 }
6516 else
6517 {
6518 /* memory target */
6519 IEM_MC_BEGIN(0, 1);
6520 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6523 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6524 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6525 } IEM_MC_ELSE() {
6526 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6527 } IEM_MC_ENDIF();
6528 IEM_MC_ADVANCE_RIP();
6529 IEM_MC_END();
6530 }
6531 return VINF_SUCCESS;
6532}
6533
6534
6535/** Opcode 0x0f 0x93. */
6536FNIEMOP_DEF(iemOp_setnc_Eb)
6537{
6538 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
6539 IEMOP_HLP_MIN_386();
6540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6541
6542 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6543 * any way. AMD says it's "unused", whatever that means. We're
6544 * ignoring it for now. */
6545 if (IEM_IS_MODRM_REG_MODE(bRm))
6546 {
6547 /* register target */
6548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6549 IEM_MC_BEGIN(0, 0);
6550 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6551 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6552 } IEM_MC_ELSE() {
6553 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6554 } IEM_MC_ENDIF();
6555 IEM_MC_ADVANCE_RIP();
6556 IEM_MC_END();
6557 }
6558 else
6559 {
6560 /* memory target */
6561 IEM_MC_BEGIN(0, 1);
6562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6563 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6565 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6566 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6567 } IEM_MC_ELSE() {
6568 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6569 } IEM_MC_ENDIF();
6570 IEM_MC_ADVANCE_RIP();
6571 IEM_MC_END();
6572 }
6573 return VINF_SUCCESS;
6574}
6575
6576
6577/** Opcode 0x0f 0x94. */
6578FNIEMOP_DEF(iemOp_sete_Eb)
6579{
6580 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
6581 IEMOP_HLP_MIN_386();
6582 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6583
6584 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6585 * any way. AMD says it's "unused", whatever that means. We're
6586 * ignoring it for now. */
6587 if (IEM_IS_MODRM_REG_MODE(bRm))
6588 {
6589 /* register target */
6590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6591 IEM_MC_BEGIN(0, 0);
6592 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6593 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6594 } IEM_MC_ELSE() {
6595 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6596 } IEM_MC_ENDIF();
6597 IEM_MC_ADVANCE_RIP();
6598 IEM_MC_END();
6599 }
6600 else
6601 {
6602 /* memory target */
6603 IEM_MC_BEGIN(0, 1);
6604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6607 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6608 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6609 } IEM_MC_ELSE() {
6610 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6611 } IEM_MC_ENDIF();
6612 IEM_MC_ADVANCE_RIP();
6613 IEM_MC_END();
6614 }
6615 return VINF_SUCCESS;
6616}
6617
6618
6619/** Opcode 0x0f 0x95. */
6620FNIEMOP_DEF(iemOp_setne_Eb)
6621{
6622 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
6623 IEMOP_HLP_MIN_386();
6624 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6625
6626 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6627 * any way. AMD says it's "unused", whatever that means. We're
6628 * ignoring it for now. */
6629 if (IEM_IS_MODRM_REG_MODE(bRm))
6630 {
6631 /* register target */
6632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6633 IEM_MC_BEGIN(0, 0);
6634 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6635 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6636 } IEM_MC_ELSE() {
6637 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6638 } IEM_MC_ENDIF();
6639 IEM_MC_ADVANCE_RIP();
6640 IEM_MC_END();
6641 }
6642 else
6643 {
6644 /* memory target */
6645 IEM_MC_BEGIN(0, 1);
6646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6649 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6650 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6651 } IEM_MC_ELSE() {
6652 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6653 } IEM_MC_ENDIF();
6654 IEM_MC_ADVANCE_RIP();
6655 IEM_MC_END();
6656 }
6657 return VINF_SUCCESS;
6658}
6659
6660
6661/** Opcode 0x0f 0x96. */
6662FNIEMOP_DEF(iemOp_setbe_Eb)
6663{
6664 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
6665 IEMOP_HLP_MIN_386();
6666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6667
6668 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6669 * any way. AMD says it's "unused", whatever that means. We're
6670 * ignoring it for now. */
6671 if (IEM_IS_MODRM_REG_MODE(bRm))
6672 {
6673 /* register target */
6674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6675 IEM_MC_BEGIN(0, 0);
6676 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6677 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6678 } IEM_MC_ELSE() {
6679 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6680 } IEM_MC_ENDIF();
6681 IEM_MC_ADVANCE_RIP();
6682 IEM_MC_END();
6683 }
6684 else
6685 {
6686 /* memory target */
6687 IEM_MC_BEGIN(0, 1);
6688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6691 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6692 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6693 } IEM_MC_ELSE() {
6694 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6695 } IEM_MC_ENDIF();
6696 IEM_MC_ADVANCE_RIP();
6697 IEM_MC_END();
6698 }
6699 return VINF_SUCCESS;
6700}
6701
6702
6703/** Opcode 0x0f 0x97. */
6704FNIEMOP_DEF(iemOp_setnbe_Eb)
6705{
6706 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
6707 IEMOP_HLP_MIN_386();
6708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6709
6710 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6711 * any way. AMD says it's "unused", whatever that means. We're
6712 * ignoring it for now. */
6713 if (IEM_IS_MODRM_REG_MODE(bRm))
6714 {
6715 /* register target */
6716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6717 IEM_MC_BEGIN(0, 0);
6718 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6719 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6720 } IEM_MC_ELSE() {
6721 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6722 } IEM_MC_ENDIF();
6723 IEM_MC_ADVANCE_RIP();
6724 IEM_MC_END();
6725 }
6726 else
6727 {
6728 /* memory target */
6729 IEM_MC_BEGIN(0, 1);
6730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6733 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6734 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6735 } IEM_MC_ELSE() {
6736 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6737 } IEM_MC_ENDIF();
6738 IEM_MC_ADVANCE_RIP();
6739 IEM_MC_END();
6740 }
6741 return VINF_SUCCESS;
6742}
6743
6744
6745/** Opcode 0x0f 0x98. */
6746FNIEMOP_DEF(iemOp_sets_Eb)
6747{
6748 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
6749 IEMOP_HLP_MIN_386();
6750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6751
6752 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6753 * any way. AMD says it's "unused", whatever that means. We're
6754 * ignoring it for now. */
6755 if (IEM_IS_MODRM_REG_MODE(bRm))
6756 {
6757 /* register target */
6758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6759 IEM_MC_BEGIN(0, 0);
6760 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6761 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6762 } IEM_MC_ELSE() {
6763 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6764 } IEM_MC_ENDIF();
6765 IEM_MC_ADVANCE_RIP();
6766 IEM_MC_END();
6767 }
6768 else
6769 {
6770 /* memory target */
6771 IEM_MC_BEGIN(0, 1);
6772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6775 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6776 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6777 } IEM_MC_ELSE() {
6778 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6779 } IEM_MC_ENDIF();
6780 IEM_MC_ADVANCE_RIP();
6781 IEM_MC_END();
6782 }
6783 return VINF_SUCCESS;
6784}
6785
6786
6787/** Opcode 0x0f 0x99. */
6788FNIEMOP_DEF(iemOp_setns_Eb)
6789{
6790 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
6791 IEMOP_HLP_MIN_386();
6792 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6793
6794 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6795 * any way. AMD says it's "unused", whatever that means. We're
6796 * ignoring it for now. */
6797 if (IEM_IS_MODRM_REG_MODE(bRm))
6798 {
6799 /* register target */
6800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6801 IEM_MC_BEGIN(0, 0);
6802 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6803 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6804 } IEM_MC_ELSE() {
6805 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6806 } IEM_MC_ENDIF();
6807 IEM_MC_ADVANCE_RIP();
6808 IEM_MC_END();
6809 }
6810 else
6811 {
6812 /* memory target */
6813 IEM_MC_BEGIN(0, 1);
6814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6817 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6818 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6819 } IEM_MC_ELSE() {
6820 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6821 } IEM_MC_ENDIF();
6822 IEM_MC_ADVANCE_RIP();
6823 IEM_MC_END();
6824 }
6825 return VINF_SUCCESS;
6826}
6827
6828
6829/** Opcode 0x0f 0x9a. */
6830FNIEMOP_DEF(iemOp_setp_Eb)
6831{
6832 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
6833 IEMOP_HLP_MIN_386();
6834 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6835
6836 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6837 * any way. AMD says it's "unused", whatever that means. We're
6838 * ignoring it for now. */
6839 if (IEM_IS_MODRM_REG_MODE(bRm))
6840 {
6841 /* register target */
6842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6843 IEM_MC_BEGIN(0, 0);
6844 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6845 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6846 } IEM_MC_ELSE() {
6847 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6848 } IEM_MC_ENDIF();
6849 IEM_MC_ADVANCE_RIP();
6850 IEM_MC_END();
6851 }
6852 else
6853 {
6854 /* memory target */
6855 IEM_MC_BEGIN(0, 1);
6856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6858 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6859 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6860 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6861 } IEM_MC_ELSE() {
6862 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6863 } IEM_MC_ENDIF();
6864 IEM_MC_ADVANCE_RIP();
6865 IEM_MC_END();
6866 }
6867 return VINF_SUCCESS;
6868}
6869
6870
6871/** Opcode 0x0f 0x9b. */
6872FNIEMOP_DEF(iemOp_setnp_Eb)
6873{
6874 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
6875 IEMOP_HLP_MIN_386();
6876 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6877
6878 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6879 * any way. AMD says it's "unused", whatever that means. We're
6880 * ignoring it for now. */
6881 if (IEM_IS_MODRM_REG_MODE(bRm))
6882 {
6883 /* register target */
6884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6885 IEM_MC_BEGIN(0, 0);
6886 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6887 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6888 } IEM_MC_ELSE() {
6889 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6890 } IEM_MC_ENDIF();
6891 IEM_MC_ADVANCE_RIP();
6892 IEM_MC_END();
6893 }
6894 else
6895 {
6896 /* memory target */
6897 IEM_MC_BEGIN(0, 1);
6898 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6901 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6902 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6903 } IEM_MC_ELSE() {
6904 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6905 } IEM_MC_ENDIF();
6906 IEM_MC_ADVANCE_RIP();
6907 IEM_MC_END();
6908 }
6909 return VINF_SUCCESS;
6910}
6911
6912
6913/** Opcode 0x0f 0x9c. */
6914FNIEMOP_DEF(iemOp_setl_Eb)
6915{
6916 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
6917 IEMOP_HLP_MIN_386();
6918 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6919
6920 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6921 * any way. AMD says it's "unused", whatever that means. We're
6922 * ignoring it for now. */
6923 if (IEM_IS_MODRM_REG_MODE(bRm))
6924 {
6925 /* register target */
6926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6927 IEM_MC_BEGIN(0, 0);
6928 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6929 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6930 } IEM_MC_ELSE() {
6931 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6932 } IEM_MC_ENDIF();
6933 IEM_MC_ADVANCE_RIP();
6934 IEM_MC_END();
6935 }
6936 else
6937 {
6938 /* memory target */
6939 IEM_MC_BEGIN(0, 1);
6940 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6943 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6944 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6945 } IEM_MC_ELSE() {
6946 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6947 } IEM_MC_ENDIF();
6948 IEM_MC_ADVANCE_RIP();
6949 IEM_MC_END();
6950 }
6951 return VINF_SUCCESS;
6952}
6953
6954
6955/** Opcode 0x0f 0x9d. */
6956FNIEMOP_DEF(iemOp_setnl_Eb)
6957{
6958 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
6959 IEMOP_HLP_MIN_386();
6960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6961
6962 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6963 * any way. AMD says it's "unused", whatever that means. We're
6964 * ignoring it for now. */
6965 if (IEM_IS_MODRM_REG_MODE(bRm))
6966 {
6967 /* register target */
6968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6969 IEM_MC_BEGIN(0, 0);
6970 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6971 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6972 } IEM_MC_ELSE() {
6973 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6974 } IEM_MC_ENDIF();
6975 IEM_MC_ADVANCE_RIP();
6976 IEM_MC_END();
6977 }
6978 else
6979 {
6980 /* memory target */
6981 IEM_MC_BEGIN(0, 1);
6982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6983 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6985 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6986 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6987 } IEM_MC_ELSE() {
6988 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6989 } IEM_MC_ENDIF();
6990 IEM_MC_ADVANCE_RIP();
6991 IEM_MC_END();
6992 }
6993 return VINF_SUCCESS;
6994}
6995
6996
6997/** Opcode 0x0f 0x9e. */
6998FNIEMOP_DEF(iemOp_setle_Eb)
6999{
7000 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
7001 IEMOP_HLP_MIN_386();
7002 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7003
7004 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7005 * any way. AMD says it's "unused", whatever that means. We're
7006 * ignoring it for now. */
7007 if (IEM_IS_MODRM_REG_MODE(bRm))
7008 {
7009 /* register target */
7010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7011 IEM_MC_BEGIN(0, 0);
7012 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7013 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7014 } IEM_MC_ELSE() {
7015 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7016 } IEM_MC_ENDIF();
7017 IEM_MC_ADVANCE_RIP();
7018 IEM_MC_END();
7019 }
7020 else
7021 {
7022 /* memory target */
7023 IEM_MC_BEGIN(0, 1);
7024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7027 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7028 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7029 } IEM_MC_ELSE() {
7030 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7031 } IEM_MC_ENDIF();
7032 IEM_MC_ADVANCE_RIP();
7033 IEM_MC_END();
7034 }
7035 return VINF_SUCCESS;
7036}
7037
7038
7039/** Opcode 0x0f 0x9f. */
7040FNIEMOP_DEF(iemOp_setnle_Eb)
7041{
7042 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
7043 IEMOP_HLP_MIN_386();
7044 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7045
7046 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7047 * any way. AMD says it's "unused", whatever that means. We're
7048 * ignoring it for now. */
7049 if (IEM_IS_MODRM_REG_MODE(bRm))
7050 {
7051 /* register target */
7052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7053 IEM_MC_BEGIN(0, 0);
7054 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7055 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7056 } IEM_MC_ELSE() {
7057 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7058 } IEM_MC_ENDIF();
7059 IEM_MC_ADVANCE_RIP();
7060 IEM_MC_END();
7061 }
7062 else
7063 {
7064 /* memory target */
7065 IEM_MC_BEGIN(0, 1);
7066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7069 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7070 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7071 } IEM_MC_ELSE() {
7072 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7073 } IEM_MC_ENDIF();
7074 IEM_MC_ADVANCE_RIP();
7075 IEM_MC_END();
7076 }
7077 return VINF_SUCCESS;
7078}
7079
7080
7081/**
7082 * Common 'push segment-register' helper.
7083 */
7084FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
7085{
7086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7087 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
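 /* In 64-bit mode only FS and GS can be pushed this way (0x0f 0xa0/0xa8);
    the ES/CS/SS/DS pushes live in the one-byte opcode map and are invalid
    there, which is what the assertion above encodes. */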
7088 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7089
7090 switch (pVCpu->iem.s.enmEffOpSize)
7091 {
7092 case IEMMODE_16BIT:
7093 IEM_MC_BEGIN(0, 1);
7094 IEM_MC_LOCAL(uint16_t, u16Value);
7095 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
7096 IEM_MC_PUSH_U16(u16Value);
7097 IEM_MC_ADVANCE_RIP();
7098 IEM_MC_END();
7099 break;
7100
7101 case IEMMODE_32BIT:
7102 IEM_MC_BEGIN(0, 1);
7103 IEM_MC_LOCAL(uint32_t, u32Value);
7104 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
7105 IEM_MC_PUSH_U32_SREG(u32Value);
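 /* Note the dedicated _SREG push: a 32-bit 'push sreg' on real hardware
    (Intel CPUs at least) only writes the low 16 bits of the stack slot,
    leaving the upper half untouched, and the special micro-op is
    presumably there to reproduce that quirk. */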
7106 IEM_MC_ADVANCE_RIP();
7107 IEM_MC_END();
7108 break;
7109
7110 case IEMMODE_64BIT:
7111 IEM_MC_BEGIN(0, 1);
7112 IEM_MC_LOCAL(uint64_t, u64Value);
7113 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
7114 IEM_MC_PUSH_U64(u64Value);
7115 IEM_MC_ADVANCE_RIP();
7116 IEM_MC_END();
7117 break;
7118 }
7119
7120 return VINF_SUCCESS;
7121}
7122
7123
7124/** Opcode 0x0f 0xa0. */
7125FNIEMOP_DEF(iemOp_push_fs)
7126{
7127 IEMOP_MNEMONIC(push_fs, "push fs");
7128 IEMOP_HLP_MIN_386();
7129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7130 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
7131}
7132
7133
7134/** Opcode 0x0f 0xa1. */
7135FNIEMOP_DEF(iemOp_pop_fs)
7136{
7137 IEMOP_MNEMONIC(pop_fs, "pop fs");
7138 IEMOP_HLP_MIN_386();
7139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7140 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
7141}
7142
7143
7144/** Opcode 0x0f 0xa2. */
7145FNIEMOP_DEF(iemOp_cpuid)
7146{
7147 IEMOP_MNEMONIC(cpuid, "cpuid");
7148 IEMOP_HLP_MIN_486(); /* not all 486s actually have CPUID. */
7149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7150 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
7151}
7152
7153
7154/**
7155 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
7156 * iemOp_bts_Ev_Gv.
7157 */
7158FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
7159{
7160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7161 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7162
7163 if (IEM_IS_MODRM_REG_MODE(bRm))
7164 {
7165 /* register destination. */
7166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7167 switch (pVCpu->iem.s.enmEffOpSize)
7168 {
7169 case IEMMODE_16BIT:
7170 IEM_MC_BEGIN(3, 0);
7171 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7172 IEM_MC_ARG(uint16_t, u16Src, 1);
7173 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7174
7175 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7176 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
7177 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7178 IEM_MC_REF_EFLAGS(pEFlags);
7179 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7180
7181 IEM_MC_ADVANCE_RIP();
7182 IEM_MC_END();
7183 return VINF_SUCCESS;
7184
7185 case IEMMODE_32BIT:
7186 IEM_MC_BEGIN(3, 0);
7187 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7188 IEM_MC_ARG(uint32_t, u32Src, 1);
7189 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7190
7191 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7192 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
7193 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7194 IEM_MC_REF_EFLAGS(pEFlags);
7195 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7196
7197 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7198 IEM_MC_ADVANCE_RIP();
7199 IEM_MC_END();
7200 return VINF_SUCCESS;
7201
7202 case IEMMODE_64BIT:
7203 IEM_MC_BEGIN(3, 0);
7204 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7205 IEM_MC_ARG(uint64_t, u64Src, 1);
7206 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7207
7208 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7209 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
7210 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7211 IEM_MC_REF_EFLAGS(pEFlags);
7212 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7213
7214 IEM_MC_ADVANCE_RIP();
7215 IEM_MC_END();
7216 return VINF_SUCCESS;
7217
7218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7219 }
7220 }
7221 else
7222 {
7223 /* memory destination. */
7224
7225 uint32_t fAccess;
7226 if (pImpl->pfnLockedU16)
7227 fAccess = IEM_ACCESS_DATA_RW;
7228 else /* BT */
7229 fAccess = IEM_ACCESS_DATA_R;
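 /* Only BTS/BTR/BTC write the destination back and provide locked
    variants; plain BT just reads, so it gets read-only access and needs
    no LOCK prefix handling. */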
7230
7231 /** @todo test negative bit offsets! */
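 /* For memory destinations the bit offset is not masked to the operand
    size (the register form above simply masks it); instead it is a signed
    index selecting which word/dword/qword holds the bit. E.g. a 16-bit BT
    with bit offset 35 accesses the word at EA + (35 >> 4) * 2 and tests
    bit 35 & 15 = 3 within it. The SAR/SHL/ADD sequence in each case below
    computes exactly that adjustment; the arithmetic shift makes negative
    offsets address downwards. */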
7232 switch (pVCpu->iem.s.enmEffOpSize)
7233 {
7234 case IEMMODE_16BIT:
7235 IEM_MC_BEGIN(3, 2);
7236 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7237 IEM_MC_ARG(uint16_t, u16Src, 1);
7238 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7240 IEM_MC_LOCAL(int16_t, i16AddrAdj);
7241
7242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7243 if (pImpl->pfnLockedU16)
7244 IEMOP_HLP_DONE_DECODING();
7245 else
7246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7247 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7248 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
7249 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
7250 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
7251 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
7252 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
7253 IEM_MC_FETCH_EFLAGS(EFlags);
7254
7255 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7256 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7257 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7258 else
7259 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7260 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7261
7262 IEM_MC_COMMIT_EFLAGS(EFlags);
7263 IEM_MC_ADVANCE_RIP();
7264 IEM_MC_END();
7265 return VINF_SUCCESS;
7266
7267 case IEMMODE_32BIT:
7268 IEM_MC_BEGIN(3, 2);
7269 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7270 IEM_MC_ARG(uint32_t, u32Src, 1);
7271 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7273 IEM_MC_LOCAL(int32_t, i32AddrAdj);
7274
7275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7276 if (pImpl->pfnLockedU16)
7277 IEMOP_HLP_DONE_DECODING();
7278 else
7279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7280 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7281 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
7282 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
7283 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
7284 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
7285 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
7286 IEM_MC_FETCH_EFLAGS(EFlags);
7287
7288 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7289 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7290 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7291 else
7292 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7293 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7294
7295 IEM_MC_COMMIT_EFLAGS(EFlags);
7296 IEM_MC_ADVANCE_RIP();
7297 IEM_MC_END();
7298 return VINF_SUCCESS;
7299
7300 case IEMMODE_64BIT:
7301 IEM_MC_BEGIN(3, 2);
7302 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7303 IEM_MC_ARG(uint64_t, u64Src, 1);
7304 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7305 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7306 IEM_MC_LOCAL(int64_t, i64AddrAdj);
7307
7308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7309 if (pImpl->pfnLockedU16)
7310 IEMOP_HLP_DONE_DECODING();
7311 else
7312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7313 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7314 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
7315 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
7316 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
7317 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
7318 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
7319 IEM_MC_FETCH_EFLAGS(EFlags);
7320
7321 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7322 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7323 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7324 else
7325 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7326 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7327
7328 IEM_MC_COMMIT_EFLAGS(EFlags);
7329 IEM_MC_ADVANCE_RIP();
7330 IEM_MC_END();
7331 return VINF_SUCCESS;
7332
7333 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7334 }
7335 }
7336}
7337
7338
7339/** Opcode 0x0f 0xa3. */
7340FNIEMOP_DEF(iemOp_bt_Ev_Gv)
7341{
7342 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
7343 IEMOP_HLP_MIN_386();
7344 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
7345}
7346
7347
7348/**
7349 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
7350 */
7351FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
7352{
7353 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7354 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7355
7356 if (IEM_IS_MODRM_REG_MODE(bRm))
7357 {
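 /* The shift count immediate follows the ModR/M byte, so it has to be
    fetched before IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX declares
    decoding complete. */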
7358 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7360
7361 switch (pVCpu->iem.s.enmEffOpSize)
7362 {
7363 case IEMMODE_16BIT:
7364 IEM_MC_BEGIN(4, 0);
7365 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7366 IEM_MC_ARG(uint16_t, u16Src, 1);
7367 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7368 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7369
7370 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7371 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7372 IEM_MC_REF_EFLAGS(pEFlags);
7373 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7374
7375 IEM_MC_ADVANCE_RIP();
7376 IEM_MC_END();
7377 return VINF_SUCCESS;
7378
7379 case IEMMODE_32BIT:
7380 IEM_MC_BEGIN(4, 0);
7381 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7382 IEM_MC_ARG(uint32_t, u32Src, 1);
7383 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7384 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7385
7386 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7387 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7388 IEM_MC_REF_EFLAGS(pEFlags);
7389 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7390
7391 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7392 IEM_MC_ADVANCE_RIP();
7393 IEM_MC_END();
7394 return VINF_SUCCESS;
7395
7396 case IEMMODE_64BIT:
7397 IEM_MC_BEGIN(4, 0);
7398 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7399 IEM_MC_ARG(uint64_t, u64Src, 1);
7400 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7401 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7402
7403 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7404 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7405 IEM_MC_REF_EFLAGS(pEFlags);
7406 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7407
7408 IEM_MC_ADVANCE_RIP();
7409 IEM_MC_END();
7410 return VINF_SUCCESS;
7411
7412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7413 }
7414 }
7415 else
7416 {
7417 switch (pVCpu->iem.s.enmEffOpSize)
7418 {
7419 case IEMMODE_16BIT:
7420 IEM_MC_BEGIN(4, 2);
7421 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7422 IEM_MC_ARG(uint16_t, u16Src, 1);
7423 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7424 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7425 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7426
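 /* The trailing '1' passed to IEM_MC_CALC_RM_EFF_ADDR accounts for the
    immediate byte (the shift count) still to be fetched, which is needed
    to get RIP-relative addressing right in 64-bit mode. */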
7427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7428 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7429 IEM_MC_ASSIGN(cShiftArg, cShift);
7430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7431 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7432 IEM_MC_FETCH_EFLAGS(EFlags);
7433 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7434 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7435
7436 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7437 IEM_MC_COMMIT_EFLAGS(EFlags);
7438 IEM_MC_ADVANCE_RIP();
7439 IEM_MC_END();
7440 return VINF_SUCCESS;
7441
7442 case IEMMODE_32BIT:
7443 IEM_MC_BEGIN(4, 2);
7444 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7445 IEM_MC_ARG(uint32_t, u32Src, 1);
7446 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7447 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7449
7450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7451 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7452 IEM_MC_ASSIGN(cShiftArg, cShift);
7453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7454 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7455 IEM_MC_FETCH_EFLAGS(EFlags);
7456 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7457 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7458
7459 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7460 IEM_MC_COMMIT_EFLAGS(EFlags);
7461 IEM_MC_ADVANCE_RIP();
7462 IEM_MC_END();
7463 return VINF_SUCCESS;
7464
7465 case IEMMODE_64BIT:
7466 IEM_MC_BEGIN(4, 2);
7467 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7468 IEM_MC_ARG(uint64_t, u64Src, 1);
7469 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7470 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7472
7473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7474 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7475 IEM_MC_ASSIGN(cShiftArg, cShift);
7476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7477 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7478 IEM_MC_FETCH_EFLAGS(EFlags);
7479 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7480 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7481
7482 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7483 IEM_MC_COMMIT_EFLAGS(EFlags);
7484 IEM_MC_ADVANCE_RIP();
7485 IEM_MC_END();
7486 return VINF_SUCCESS;
7487
7488 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7489 }
7490 }
7491}
7492
7493
7494/**
7495 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
7496 */
7497FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
7498{
7499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7500 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7501
7502 if (IEM_IS_MODRM_REG_MODE(bRm))
7503 {
7504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7505
7506 switch (pVCpu->iem.s.enmEffOpSize)
7507 {
7508 case IEMMODE_16BIT:
7509 IEM_MC_BEGIN(4, 0);
7510 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7511 IEM_MC_ARG(uint16_t, u16Src, 1);
7512 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7513 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7514
7515 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7516 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7517 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7518 IEM_MC_REF_EFLAGS(pEFlags);
7519 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7520
7521 IEM_MC_ADVANCE_RIP();
7522 IEM_MC_END();
7523 return VINF_SUCCESS;
7524
7525 case IEMMODE_32BIT:
7526 IEM_MC_BEGIN(4, 0);
7527 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7528 IEM_MC_ARG(uint32_t, u32Src, 1);
7529 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7530 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7531
7532 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7533 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7534 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7535 IEM_MC_REF_EFLAGS(pEFlags);
7536 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7537
7538 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7539 IEM_MC_ADVANCE_RIP();
7540 IEM_MC_END();
7541 return VINF_SUCCESS;
7542
7543 case IEMMODE_64BIT:
7544 IEM_MC_BEGIN(4, 0);
7545 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7546 IEM_MC_ARG(uint64_t, u64Src, 1);
7547 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7548 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7549
7550 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7551 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7552 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7553 IEM_MC_REF_EFLAGS(pEFlags);
7554 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7555
7556 IEM_MC_ADVANCE_RIP();
7557 IEM_MC_END();
7558 return VINF_SUCCESS;
7559
7560 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7561 }
7562 }
7563 else
7564 {
7565 switch (pVCpu->iem.s.enmEffOpSize)
7566 {
7567 case IEMMODE_16BIT:
7568 IEM_MC_BEGIN(4, 2);
7569 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7570 IEM_MC_ARG(uint16_t, u16Src, 1);
7571 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7572 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7574
7575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7577 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7578 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7579 IEM_MC_FETCH_EFLAGS(EFlags);
7580 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7581 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7582
7583 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7584 IEM_MC_COMMIT_EFLAGS(EFlags);
7585 IEM_MC_ADVANCE_RIP();
7586 IEM_MC_END();
7587 return VINF_SUCCESS;
7588
7589 case IEMMODE_32BIT:
7590 IEM_MC_BEGIN(4, 2);
7591 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7592 IEM_MC_ARG(uint32_t, u32Src, 1);
7593 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7594 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7595 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7596
7597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7599 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7600 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7601 IEM_MC_FETCH_EFLAGS(EFlags);
7602 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7603 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7604
7605 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7606 IEM_MC_COMMIT_EFLAGS(EFlags);
7607 IEM_MC_ADVANCE_RIP();
7608 IEM_MC_END();
7609 return VINF_SUCCESS;
7610
7611 case IEMMODE_64BIT:
7612 IEM_MC_BEGIN(4, 2);
7613 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7614 IEM_MC_ARG(uint64_t, u64Src, 1);
7615 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7616 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7618
7619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7621 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7622 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7623 IEM_MC_FETCH_EFLAGS(EFlags);
7624 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7625 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7626
7627 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7628 IEM_MC_COMMIT_EFLAGS(EFlags);
7629 IEM_MC_ADVANCE_RIP();
7630 IEM_MC_END();
7631 return VINF_SUCCESS;
7632
7633 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7634 }
7635 }
7636}
7637
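/*
 * Reference sketch (not used by the decoder): the double-precision shift the
 * workers above dispatch to behaves roughly like this in the 32-bit SHLD
 * case, assuming the count has already been masked. The real iemAImpl_*
 * helpers also compute EFLAGS and handle the 16-bit quirks; the function
 * name below is made up for illustration.
 */
#if 0
static void iemSketchShldU32(uint32_t *puDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;                          /* the count is masked modulo 32 */
    if (cShift)
        *puDst = (*puDst << cShift)        /* destination supplies the high bits, */
               | (uSrc >> (32 - cShift));  /* the source fills in from the right. */
}
#endif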
7638
7639
7640/** Opcode 0x0f 0xa4. */
7641FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
7642{
7643 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
7644 IEMOP_HLP_MIN_386();
7645 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
7646}
7647
7648
7649/** Opcode 0x0f 0xa5. */
7650FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
7651{
7652 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
7653 IEMOP_HLP_MIN_386();
7654 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
7655}
7656
7657
7658/** Opcode 0x0f 0xa8. */
7659FNIEMOP_DEF(iemOp_push_gs)
7660{
7661 IEMOP_MNEMONIC(push_gs, "push gs");
7662 IEMOP_HLP_MIN_386();
7663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7664 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
7665}
7666
7667
7668/** Opcode 0x0f 0xa9. */
7669FNIEMOP_DEF(iemOp_pop_gs)
7670{
7671 IEMOP_MNEMONIC(pop_gs, "pop gs");
7672 IEMOP_HLP_MIN_386();
7673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7674 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
7675}
7676
7677
7678/** Opcode 0x0f 0xaa. */
7679FNIEMOP_DEF(iemOp_rsm)
7680{
7681 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
7682 IEMOP_HLP_MIN_386(); /* 386SL and later. */
7683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7684 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
7685}
7686
7687
7688
7689/** Opcode 0x0f 0xab. */
7690FNIEMOP_DEF(iemOp_bts_Ev_Gv)
7691{
7692 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
7693 IEMOP_HLP_MIN_386();
7694 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
7695}
7696
7697
7698/** Opcode 0x0f 0xac. */
7699FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
7700{
7701 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
7702 IEMOP_HLP_MIN_386();
7703 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
7704}
7705
7706
7707/** Opcode 0x0f 0xad. */
7708FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
7709{
7710 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
7711 IEMOP_HLP_MIN_386();
7712 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
7713}
7714
7715
7716/** Opcode 0x0f 0xae mem/0. */
7717FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
7718{
7719 IEMOP_MNEMONIC(fxsave, "fxsave m512");
7720 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
7721 return IEMOP_RAISE_INVALID_OPCODE();
7722
7723 IEM_MC_BEGIN(3, 1);
7724 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7725 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7726 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7729 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7730 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7731 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
7732 IEM_MC_END();
7733 return VINF_SUCCESS;
7734}
7735
7736
7737/** Opcode 0x0f 0xae mem/1. */
7738FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
7739{
7740 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
7741 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
7742 return IEMOP_RAISE_INVALID_OPCODE();
7743
7744 IEM_MC_BEGIN(3, 1);
7745 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7746 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7747 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7750 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7751 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7752 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7753 IEM_MC_END();
7754 return VINF_SUCCESS;
7755}
7756
7757
7758/**
7759 * @opmaps grp15
7760 * @opcode !11/2
7761 * @oppfx none
7762 * @opcpuid sse
7763 * @opgroup og_sse_mxcsrsm
7764 * @opxcpttype 5
7765 * @optest op1=0 -> mxcsr=0
7766 * @optest op1=0x2083 -> mxcsr=0x2083
7767 * @optest op1=0xfffffffe -> value.xcpt=0xd
7768 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
7769 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
7770 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
7771 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
7772 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
7773 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
7774 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
7775 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
7776 */
7777FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
7778{
7779 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7780 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7781 return IEMOP_RAISE_INVALID_OPCODE();
7782
7783 IEM_MC_BEGIN(2, 0);
7784 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7785 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7788 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7789 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7790 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
7791 IEM_MC_END();
7792 return VINF_SUCCESS;
7793}
7794
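/*
 * Illustration only: the @optest op1=0xfffffffe -> value.xcpt=0xd line above
 * reflects that LDMXCSR raises #GP(0) when any reserved MXCSR bit is set.
 * A minimal version of that check could look like this, with fMxCsrMask
 * standing in for the guest's MXCSR_MASK (0xffbf on CPUs without DAZ); the
 * name is made up and the real check lives in iemCImpl_ldmxcsr.
 */
#if 0
static bool iemSketchIsValidMxCsr(uint32_t uNewMxCsr, uint32_t fMxCsrMask)
{
    return (uNewMxCsr & ~fMxCsrMask) == 0; /* any reserved bit set => #GP(0) */
}
#endif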
7795
7796/**
7797 * @opmaps grp15
7798 * @opcode !11/3
7799 * @oppfx none
7800 * @opcpuid sse
7801 * @opgroup og_sse_mxcsrsm
7802 * @opxcpttype 5
7803 * @optest mxcsr=0 -> op1=0
7804 * @optest mxcsr=0x2083 -> op1=0x2083
7805 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
7806 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
7807 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
7808 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
7809 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
7810 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
7811 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
7812 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
7813 */
7814FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
7815{
7816 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7817 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7818 return IEMOP_RAISE_INVALID_OPCODE();
7819
7820 IEM_MC_BEGIN(2, 0);
7821 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7822 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7825 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7826 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7827 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
7828 IEM_MC_END();
7829 return VINF_SUCCESS;
7830}
7831
7832
7833/**
7834 * @opmaps grp15
7835 * @opcode !11/4
7836 * @oppfx none
7837 * @opcpuid xsave
7838 * @opgroup og_system
7839 * @opxcpttype none
7840 */
7841FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
7842{
7843 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
7844 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7845 return IEMOP_RAISE_INVALID_OPCODE();
7846
7847 IEM_MC_BEGIN(3, 0);
7848 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7849 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7850 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7851 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7853 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7854 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7855 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
7856 IEM_MC_END();
7857 return VINF_SUCCESS;
7858}
7859
7860
7861/**
7862 * @opmaps grp15
7863 * @opcode !11/5
7864 * @oppfx none
7865 * @opcpuid xsave
7866 * @opgroup og_system
7867 * @opxcpttype none
7868 */
7869FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
7870{
7871 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
7872 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7873 return IEMOP_RAISE_INVALID_OPCODE();
7874
7875 IEM_MC_BEGIN(3, 0);
7876 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7877 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7878 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7879 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7881 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7882 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7883 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7884 IEM_MC_END();
7885 return VINF_SUCCESS;
7886}
7887
7888/** Opcode 0x0f 0xae mem/6. */
7889FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
7890
7891/**
7892 * @opmaps grp15
7893 * @opcode !11/7
7894 * @oppfx none
7895 * @opcpuid clfsh
7896 * @opgroup og_cachectl
7897 * @optest op1=1 ->
7898 */
7899FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
7900{
7901 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7902 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
7903 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7904
7905 IEM_MC_BEGIN(2, 0);
7906 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7907 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7910 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7911 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7912 IEM_MC_END();
7913 return VINF_SUCCESS;
7914}
7915
7916/**
7917 * @opmaps grp15
7918 * @opcode !11/7
7919 * @oppfx 0x66
7920 * @opcpuid clflushopt
7921 * @opgroup og_cachectl
7922 * @optest op1=1 ->
7923 */
7924FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
7925{
7926 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7927 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
7928 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7929
7930 IEM_MC_BEGIN(2, 0);
7931 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7932 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7935 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7936 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7937 IEM_MC_END();
7938 return VINF_SUCCESS;
7939}
7940
7941
7942/** Opcode 0x0f 0xae 11b/5. */
7943FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
7944{
7945 RT_NOREF_PV(bRm);
7946 IEMOP_MNEMONIC(lfence, "lfence");
7947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7948 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7949 return IEMOP_RAISE_INVALID_OPCODE();
7950
7951 IEM_MC_BEGIN(0, 0);
7952#ifndef RT_ARCH_ARM64
7953 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7954#endif
7955 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
7956#ifndef RT_ARCH_ARM64
7957 else
7958 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7959#endif
7960 IEM_MC_ADVANCE_RIP();
7961 IEM_MC_END();
7962 return VINF_SUCCESS;
7963}
7964
7965
7966/** Opcode 0x0f 0xae 11b/6. */
7967FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
7968{
7969 RT_NOREF_PV(bRm);
7970 IEMOP_MNEMONIC(mfence, "mfence");
7971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7972 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7973 return IEMOP_RAISE_INVALID_OPCODE();
7974
7975 IEM_MC_BEGIN(0, 0);
7976#ifndef RT_ARCH_ARM64
7977 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
7978#endif
7979 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
7980#ifndef RT_ARCH_ARM64
7981 else
7982 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
7983#endif
7984 IEM_MC_ADVANCE_RIP();
7985 IEM_MC_END();
7986 return VINF_SUCCESS;
7987}
7988
7989
7990/** Opcode 0x0f 0xae 11b/7. */
7991FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
7992{
7993 RT_NOREF_PV(bRm);
7994 IEMOP_MNEMONIC(sfence, "sfence");
7995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7996 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7997 return IEMOP_RAISE_INVALID_OPCODE();
7998
7999 IEM_MC_BEGIN(0, 0);
8000#ifndef RT_ARCH_ARM64
8001 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
8002#endif
8003 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
8004#ifndef RT_ARCH_ARM64
8005 else
8006 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
8007#endif
8008 IEM_MC_ADVANCE_RIP();
8009 IEM_MC_END();
8010 return VINF_SUCCESS;
8011}
8012
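/*
 * Host-side illustration of what the three fence helpers above boil down to
 * on an SSE2-capable host; the real iemAImpl_*fence helpers and the
 * alternative memory fence used on pre-SSE2 hosts are implemented in
 * assembly elsewhere, so this sketch is for orientation only.
 */
#if 0
# include <emmintrin.h>     /* SSE/SSE2 fence intrinsics */
static void iemSketchFences(void)
{
    _mm_lfence();   /* load fence  - grp15 11b/5 */
    _mm_mfence();   /* full fence  - grp15 11b/6 */
    _mm_sfence();   /* store fence - grp15 11b/7 */
}
#endif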
8013
8014/** Opcode 0xf3 0x0f 0xae 11b/0. */
8015FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
8016{
8017 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
8018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8019 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
8020 {
8021 IEM_MC_BEGIN(1, 0);
8022 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8023 IEM_MC_ARG(uint64_t, u64Dst, 0);
8024 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
8025 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
8026 IEM_MC_ADVANCE_RIP();
8027 IEM_MC_END();
8028 }
8029 else
8030 {
8031 IEM_MC_BEGIN(1, 0);
8032 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8033 IEM_MC_ARG(uint32_t, u32Dst, 0);
8034 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
8035 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
8036 IEM_MC_ADVANCE_RIP();
8037 IEM_MC_END();
8038 }
8039 return VINF_SUCCESS;
8040}
8041
8042
8043/** Opcode 0xf3 0x0f 0xae 11b/1. */
8044FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
8045{
8046 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
8047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8048 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
8049 {
8050 IEM_MC_BEGIN(1, 0);
8051 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8052 IEM_MC_ARG(uint64_t, u64Dst, 0);
8053 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
8054 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
8055 IEM_MC_ADVANCE_RIP();
8056 IEM_MC_END();
8057 }
8058 else
8059 {
8060 IEM_MC_BEGIN(1, 0);
8061 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8062 IEM_MC_ARG(uint32_t, u32Dst, 0);
8063 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
8064 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
8065 IEM_MC_ADVANCE_RIP();
8066 IEM_MC_END();
8067 }
8068 return VINF_SUCCESS;
8069}
8070
8071
8072/** Opcode 0xf3 0x0f 0xae 11b/2. */
8073FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
8074{
8075 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
8076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8077 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
8078 {
8079 IEM_MC_BEGIN(1, 0);
8080 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8081 IEM_MC_ARG(uint64_t, u64Dst, 0);
8082 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8083 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
8084 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
8085 IEM_MC_ADVANCE_RIP();
8086 IEM_MC_END();
8087 }
8088 else
8089 {
8090 IEM_MC_BEGIN(1, 0);
8091 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8092 IEM_MC_ARG(uint32_t, u32Dst, 0);
8093 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8094 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
8095 IEM_MC_ADVANCE_RIP();
8096 IEM_MC_END();
8097 }
8098 return VINF_SUCCESS;
8099}
8100
8101
8102/** Opcode 0xf3 0x0f 0xae 11b/3. */
8103FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
8104{
8105 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
8106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8107 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
8108 {
8109 IEM_MC_BEGIN(1, 0);
8110 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8111 IEM_MC_ARG(uint64_t, u64Dst, 0);
8112 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8113 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
8114 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
8115 IEM_MC_ADVANCE_RIP();
8116 IEM_MC_END();
8117 }
8118 else
8119 {
8120 IEM_MC_BEGIN(1, 0);
8121 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8122 IEM_MC_ARG(uint32_t, u32Dst, 0);
8123 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8124 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
8125 IEM_MC_ADVANCE_RIP();
8126 IEM_MC_END();
8127 }
8128 return VINF_SUCCESS;
8129}
8130
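/*
 * Illustration of the canonical check behind
 * IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0 in the two base writers above,
 * assuming 48-bit (4-level paging) linear addresses: bits 63:47 must all
 * equal bit 47, i.e. the value must survive a 16-bit sign-extension round
 * trip. Sketch only; the name is made up.
 */
#if 0
static bool iemSketchIsCanonical48(uint64_t uAddr)
{
    return (uint64_t)((int64_t)(uAddr << 16) >> 16) == uAddr;
}
#endif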
8131
8132/**
8133 * Group 15 jump table for register variant.
8134 */
8135IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
8136{ /* pfx: none, 066h, 0f3h, 0f2h */
8137 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
8138 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
8139 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
8140 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
8141 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8142 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8143 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8144 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8145};
8146AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
8147
8148
8149/**
8150 * Group 15 jump table for memory variant.
8151 */
8152IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
8153{ /* pfx: none, 066h, 0f3h, 0f2h */
8154 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8155 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8156 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8157 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8158 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8159 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8160 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8161 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8162};
8163AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
8164
8165
8166/** Opcode 0x0f 0xae. */
8167FNIEMOP_DEF(iemOp_Grp15)
8168{
8169 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
8170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8171 if (IEM_IS_MODRM_REG_MODE(bRm))
8172 /* register, register */
8173 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
8174 + pVCpu->iem.s.idxPrefix], bRm);
8175 /* memory, register */
8176 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
8177 + pVCpu->iem.s.idxPrefix], bRm);
8178}
8179
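/*
 * Worked example of the dispatch above (sketch, name made up): the prefix
 * index follows the table comments (0 = none, 1 = 066h, 2 = 0f3h, 3 = 0f2h),
 * so a register-form F3 0F AE /0 resolves to entry 0*4 + 2, i.e.
 * iemOp_Grp15_rdfsbase.
 */
#if 0
static PFNIEMOPRM iemSketchGrp15Lookup(uint8_t bRm, uint8_t idxPrefix, bool fRegMode)
{
    uint8_t const iReg = (bRm >> 3) & 7;    /* the ModR/M reg field selects the row */
    return fRegMode
         ? g_apfnGroup15RegReg[iReg * 4 + idxPrefix]
         : g_apfnGroup15MemReg[iReg * 4 + idxPrefix];
}
#endif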
8180
8181/** Opcode 0x0f 0xaf. */
8182FNIEMOP_DEF(iemOp_imul_Gv_Ev)
8183{
8184 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
8185 IEMOP_HLP_MIN_386();
8186 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8187 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
8188}
8189
8190
8191/** Opcode 0x0f 0xb0. */
8192FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
8193{
8194 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
8195 IEMOP_HLP_MIN_486();
8196 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8197
8198 if (IEM_IS_MODRM_REG_MODE(bRm))
8199 {
8200 IEMOP_HLP_DONE_DECODING();
8201 IEM_MC_BEGIN(4, 0);
8202 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8203 IEM_MC_ARG(uint8_t *, pu8Al, 1);
8204 IEM_MC_ARG(uint8_t, u8Src, 2);
8205 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8206
8207 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8208 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8209 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
8210 IEM_MC_REF_EFLAGS(pEFlags);
8211 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8212 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
8213 else
8214 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
8215
8216 IEM_MC_ADVANCE_RIP();
8217 IEM_MC_END();
8218 }
8219 else
8220 {
8221 IEM_MC_BEGIN(4, 3);
8222 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8223 IEM_MC_ARG(uint8_t *, pu8Al, 1);
8224 IEM_MC_ARG(uint8_t, u8Src, 2);
8225 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8227 IEM_MC_LOCAL(uint8_t, u8Al);
8228
8229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8230 IEMOP_HLP_DONE_DECODING();
8231 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8232 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8233 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
8234 IEM_MC_FETCH_EFLAGS(EFlags);
8235 IEM_MC_REF_LOCAL(pu8Al, u8Al);
8236 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8237 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
8238 else
8239 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
8240
8241 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8242 IEM_MC_COMMIT_EFLAGS(EFlags);
8243 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
8244 IEM_MC_ADVANCE_RIP();
8245 IEM_MC_END();
8246 }
8247 return VINF_SUCCESS;
8248}
8249
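/*
 * Plain-C sketch of the compare-and-exchange implemented by the helpers
 * called above (byte case; EFLAGS handling omitted, the real helpers also
 * set ZF and the arithmetic flags as a CMP would; the name is made up):
 */
#if 0
static bool iemSketchCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc)
{
    if (*puDst == *puAl)
    {
        *puDst = uSrc;          /* equal:   destination <- source, ZF=1 */
        return true;
    }
    *puAl = *puDst;             /* unequal: AL <- destination, ZF=0 */
    return false;
}
#endif
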
8250/** Opcode 0x0f 0xb1. */
8251FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
8252{
8253 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
8254 IEMOP_HLP_MIN_486();
8255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8256
8257 if (IEM_IS_MODRM_REG_MODE(bRm))
8258 {
8259 IEMOP_HLP_DONE_DECODING();
8260 switch (pVCpu->iem.s.enmEffOpSize)
8261 {
8262 case IEMMODE_16BIT:
8263 IEM_MC_BEGIN(4, 0);
8264 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8265 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
8266 IEM_MC_ARG(uint16_t, u16Src, 2);
8267 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8268
8269 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8270 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8271 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
8272 IEM_MC_REF_EFLAGS(pEFlags);
8273 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8274 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
8275 else
8276 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
8277
8278 IEM_MC_ADVANCE_RIP();
8279 IEM_MC_END();
8280 return VINF_SUCCESS;
8281
8282 case IEMMODE_32BIT:
8283 IEM_MC_BEGIN(4, 0);
8284 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8285 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
8286 IEM_MC_ARG(uint32_t, u32Src, 2);
8287 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8288
8289 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8290 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8291 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
8292 IEM_MC_REF_EFLAGS(pEFlags);
8293 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8294 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
8295 else
8296 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
8297
8298 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
8299 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8300 IEM_MC_ADVANCE_RIP();
8301 IEM_MC_END();
8302 return VINF_SUCCESS;
8303
8304 case IEMMODE_64BIT:
8305 IEM_MC_BEGIN(4, 0);
8306 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8307 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
8308#ifdef RT_ARCH_X86
8309 IEM_MC_ARG(uint64_t *, pu64Src, 2);
8310#else
8311 IEM_MC_ARG(uint64_t, u64Src, 2);
8312#endif
8313 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8314
8315 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8316 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
8317 IEM_MC_REF_EFLAGS(pEFlags);
8318#ifdef RT_ARCH_X86
8319 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8320 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8321 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
8322 else
8323 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
8324#else
8325 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8326 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8327 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
8328 else
8329 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
8330#endif
8331
8332 IEM_MC_ADVANCE_RIP();
8333 IEM_MC_END();
8334 return VINF_SUCCESS;
8335
8336 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8337 }
8338 }
8339 else
8340 {
8341 switch (pVCpu->iem.s.enmEffOpSize)
8342 {
8343 case IEMMODE_16BIT:
8344 IEM_MC_BEGIN(4, 3);
8345 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8346 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
8347 IEM_MC_ARG(uint16_t, u16Src, 2);
8348 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8350 IEM_MC_LOCAL(uint16_t, u16Ax);
8351
8352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8353 IEMOP_HLP_DONE_DECODING();
8354 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8355 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8356 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
8357 IEM_MC_FETCH_EFLAGS(EFlags);
8358 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
8359 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8360 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
8361 else
8362 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
8363
8364 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8365 IEM_MC_COMMIT_EFLAGS(EFlags);
8366 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
8367 IEM_MC_ADVANCE_RIP();
8368 IEM_MC_END();
8369 return VINF_SUCCESS;
8370
8371 case IEMMODE_32BIT:
8372 IEM_MC_BEGIN(4, 3);
8373 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8374 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
8375 IEM_MC_ARG(uint32_t, u32Src, 2);
8376 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8377 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8378 IEM_MC_LOCAL(uint32_t, u32Eax);
8379
8380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8381 IEMOP_HLP_DONE_DECODING();
8382 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8383 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8384 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
8385 IEM_MC_FETCH_EFLAGS(EFlags);
8386 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
8387 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8388 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
8389 else
8390 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
8391
8392 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8393 IEM_MC_COMMIT_EFLAGS(EFlags);
8394 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
8395 IEM_MC_ADVANCE_RIP();
8396 IEM_MC_END();
8397 return VINF_SUCCESS;
8398
8399 case IEMMODE_64BIT:
8400 IEM_MC_BEGIN(4, 3);
8401 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8402 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
8403#ifdef RT_ARCH_X86
8404 IEM_MC_ARG(uint64_t *, pu64Src, 2);
8405#else
8406 IEM_MC_ARG(uint64_t, u64Src, 2);
8407#endif
8408 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8410 IEM_MC_LOCAL(uint64_t, u64Rax);
8411
8412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8413 IEMOP_HLP_DONE_DECODING();
8414 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8415 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
8416 IEM_MC_FETCH_EFLAGS(EFlags);
8417 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
8418#ifdef RT_ARCH_X86
8419 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8420 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8421 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
8422 else
8423 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
8424#else
8425 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8426 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8427 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
8428 else
8429 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
8430#endif
8431
8432 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8433 IEM_MC_COMMIT_EFLAGS(EFlags);
8434 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
8435 IEM_MC_ADVANCE_RIP();
8436 IEM_MC_END();
8437 return VINF_SUCCESS;
8438
8439 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8440 }
8441 }
8442}
8443
8444
8445FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
8446{
8447 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
8448 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
8449
8450 switch (pVCpu->iem.s.enmEffOpSize)
8451 {
8452 case IEMMODE_16BIT:
8453 IEM_MC_BEGIN(5, 1);
8454 IEM_MC_ARG(uint16_t, uSel, 0);
8455 IEM_MC_ARG(uint16_t, offSeg, 1);
8456 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8457 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8458 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8459 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8462 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8463 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
8464 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8465 IEM_MC_END();
8466 return VINF_SUCCESS;
8467
8468 case IEMMODE_32BIT:
8469 IEM_MC_BEGIN(5, 1);
8470 IEM_MC_ARG(uint16_t, uSel, 0);
8471 IEM_MC_ARG(uint32_t, offSeg, 1);
8472 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8473 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8474 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8475 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8478 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8479 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
8480 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8481 IEM_MC_END();
8482 return VINF_SUCCESS;
8483
8484 case IEMMODE_64BIT:
8485 IEM_MC_BEGIN(5, 1);
8486 IEM_MC_ARG(uint16_t, uSel, 0);
8487 IEM_MC_ARG(uint64_t, offSeg, 1);
8488 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8489 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8490 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8491 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8494 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
8495 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8496 else
8497 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8498 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
8499 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8500 IEM_MC_END();
8501 return VINF_SUCCESS;
8502
8503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8504 }
8505}
8506
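/*
 * Memory layout decoded by the worker above for the 32-bit case
 * (illustration; the real code uses the IEM_MC fetchers rather than a
 * struct): the offset comes first and the selector follows it, matching the
 * IEM_MC_FETCH_MEM_U16_DISP(..., 4) above. The 16-bit and 64-bit cases put
 * the selector at +2 and +8 respectively.
 */
#if 0
# pragma pack(1)
typedef struct IEMSKETCHFARPTR32
{
    uint32_t offSeg;    /* +0: offset loaded into the general register */
    uint16_t uSel;      /* +4: selector loaded into SS/FS/GS */
} IEMSKETCHFARPTR32;
# pragma pack()
#endif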
8507
8508/** Opcode 0x0f 0xb2. */
8509FNIEMOP_DEF(iemOp_lss_Gv_Mp)
8510{
8511 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
8512 IEMOP_HLP_MIN_386();
8513 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8514 if (IEM_IS_MODRM_REG_MODE(bRm))
8515 return IEMOP_RAISE_INVALID_OPCODE();
8516 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
8517}
8518
8519
8520/** Opcode 0x0f 0xb3. */
8521FNIEMOP_DEF(iemOp_btr_Ev_Gv)
8522{
8523 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
8524 IEMOP_HLP_MIN_386();
8525 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
8526}
8527
8528
8529/** Opcode 0x0f 0xb4. */
8530FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
8531{
8532 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
8533 IEMOP_HLP_MIN_386();
8534 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8535 if (IEM_IS_MODRM_REG_MODE(bRm))
8536 return IEMOP_RAISE_INVALID_OPCODE();
8537 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
8538}
8539
8540
8541/** Opcode 0x0f 0xb5. */
8542FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
8543{
8544 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
8545 IEMOP_HLP_MIN_386();
8546 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8547 if (IEM_IS_MODRM_REG_MODE(bRm))
8548 return IEMOP_RAISE_INVALID_OPCODE();
8549 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
8550}
8551
8552
8553/** Opcode 0x0f 0xb6. */
8554FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
8555{
8556 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
8557 IEMOP_HLP_MIN_386();
8558
8559 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8560
8561 /*
8562 * If rm is denoting a register, no more instruction bytes.
8563 */
8564 if (IEM_IS_MODRM_REG_MODE(bRm))
8565 {
8566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8567 switch (pVCpu->iem.s.enmEffOpSize)
8568 {
8569 case IEMMODE_16BIT:
8570 IEM_MC_BEGIN(0, 1);
8571 IEM_MC_LOCAL(uint16_t, u16Value);
8572 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8573 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8574 IEM_MC_ADVANCE_RIP();
8575 IEM_MC_END();
8576 return VINF_SUCCESS;
8577
8578 case IEMMODE_32BIT:
8579 IEM_MC_BEGIN(0, 1);
8580 IEM_MC_LOCAL(uint32_t, u32Value);
8581 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8582 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8583 IEM_MC_ADVANCE_RIP();
8584 IEM_MC_END();
8585 return VINF_SUCCESS;
8586
8587 case IEMMODE_64BIT:
8588 IEM_MC_BEGIN(0, 1);
8589 IEM_MC_LOCAL(uint64_t, u64Value);
8590 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8591 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8592 IEM_MC_ADVANCE_RIP();
8593 IEM_MC_END();
8594 return VINF_SUCCESS;
8595
8596 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8597 }
8598 }
8599 else
8600 {
8601 /*
8602 * We're loading a register from memory.
8603 */
8604 switch (pVCpu->iem.s.enmEffOpSize)
8605 {
8606 case IEMMODE_16BIT:
8607 IEM_MC_BEGIN(0, 2);
8608 IEM_MC_LOCAL(uint16_t, u16Value);
8609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8612 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8613 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8614 IEM_MC_ADVANCE_RIP();
8615 IEM_MC_END();
8616 return VINF_SUCCESS;
8617
8618 case IEMMODE_32BIT:
8619 IEM_MC_BEGIN(0, 2);
8620 IEM_MC_LOCAL(uint32_t, u32Value);
8621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8624 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8625 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8626 IEM_MC_ADVANCE_RIP();
8627 IEM_MC_END();
8628 return VINF_SUCCESS;
8629
8630 case IEMMODE_64BIT:
8631 IEM_MC_BEGIN(0, 2);
8632 IEM_MC_LOCAL(uint64_t, u64Value);
8633 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8634 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8636 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8637 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8638 IEM_MC_ADVANCE_RIP();
8639 IEM_MC_END();
8640 return VINF_SUCCESS;
8641
8642 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8643 }
8644 }
8645}
8646
8647
8648/** Opcode 0x0f 0xb7. */
8649FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
8650{
8651 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
8652 IEMOP_HLP_MIN_386();
8653
8654 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8655
8656 /** @todo Not entirely sure how the operand size prefix is handled here,
8657 * assuming that it will be ignored. Would be nice to have a few
8658 * tests for this. */
8659 /*
8660 * If rm is denoting a register, no more instruction bytes.
8661 */
8662 if (IEM_IS_MODRM_REG_MODE(bRm))
8663 {
8664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8665 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8666 {
8667 IEM_MC_BEGIN(0, 1);
8668 IEM_MC_LOCAL(uint32_t, u32Value);
8669 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8670 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8671 IEM_MC_ADVANCE_RIP();
8672 IEM_MC_END();
8673 }
8674 else
8675 {
8676 IEM_MC_BEGIN(0, 1);
8677 IEM_MC_LOCAL(uint64_t, u64Value);
8678 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8679 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8680 IEM_MC_ADVANCE_RIP();
8681 IEM_MC_END();
8682 }
8683 }
8684 else
8685 {
8686 /*
8687 * We're loading a register from memory.
8688 */
8689 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8690 {
8691 IEM_MC_BEGIN(0, 2);
8692 IEM_MC_LOCAL(uint32_t, u32Value);
8693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8696 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8697 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8698 IEM_MC_ADVANCE_RIP();
8699 IEM_MC_END();
8700 }
8701 else
8702 {
8703 IEM_MC_BEGIN(0, 2);
8704 IEM_MC_LOCAL(uint64_t, u64Value);
8705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8708 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8709 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8710 IEM_MC_ADVANCE_RIP();
8711 IEM_MC_END();
8712 }
8713 }
8714 return VINF_SUCCESS;
8715}
8716
8717
8718/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
8719FNIEMOP_UD_STUB(iemOp_jmpe);
8720
8721
8722/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
8723FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
8724{
8725 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8726 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
8727 return iemOp_InvalidNeedRM(pVCpu);
8728#ifndef TST_IEM_CHECK_MC
8729# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
8730 static const IEMOPBINSIZES s_Native =
8731 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
8732# endif
8733 static const IEMOPBINSIZES s_Fallback =
8734 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
8735#endif
8736 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
8737}
8738
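/*
 * The *_fallback helpers referenced above are implemented elsewhere; for
 * illustration, a portable population count along these lines would do the
 * job (classic SWAR reduction; the name is made up):
 */
#if 0
static uint32_t iemSketchPopCntU32(uint32_t u)
{
    u = u - ((u >> 1) & UINT32_C(0x55555555));                          /* 2-bit partial sums */
    u = (u & UINT32_C(0x33333333)) + ((u >> 2) & UINT32_C(0x33333333)); /* 4-bit partial sums */
    u = (u + (u >> 4)) & UINT32_C(0x0f0f0f0f);                          /* 8-bit partial sums */
    return (u * UINT32_C(0x01010101)) >> 24;                            /* total lands in the top byte */
}
#endif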
8739
8740/**
8741 * @opcode 0xb9
8742 * @opinvalid intel-modrm
8743 * @optest ->
8744 */
8745FNIEMOP_DEF(iemOp_Grp10)
8746{
8747 /*
8748 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes
8749 * the ModR/M byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
8750 */
8751 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
8752 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
8753 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
8754}
8755
8756
8757/** Opcode 0x0f 0xba. */
8758FNIEMOP_DEF(iemOp_Grp8)
8759{
8760 IEMOP_HLP_MIN_386();
8761 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8762 PCIEMOPBINSIZES pImpl;
8763 switch (IEM_GET_MODRM_REG_8(bRm))
8764 {
8765 case 0: case 1: case 2: case 3:
8766 /* Both AMD and Intel want full modr/m decoding and imm8. */
8767 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
8768 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
8769 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
8770 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
8771 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
8772 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8773 }
8774 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8775
8776 if (IEM_IS_MODRM_REG_MODE(bRm))
8777 {
8778 /* register destination. */
8779 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8781
8782 switch (pVCpu->iem.s.enmEffOpSize)
8783 {
8784 case IEMMODE_16BIT:
8785 IEM_MC_BEGIN(3, 0);
8786 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8787 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
8788 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8789
8790 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8791 IEM_MC_REF_EFLAGS(pEFlags);
8792 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8793
8794 IEM_MC_ADVANCE_RIP();
8795 IEM_MC_END();
8796 return VINF_SUCCESS;
8797
8798 case IEMMODE_32BIT:
8799 IEM_MC_BEGIN(3, 0);
8800 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8801 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
8802 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8803
8804 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8805 IEM_MC_REF_EFLAGS(pEFlags);
8806 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8807
8808 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8809 IEM_MC_ADVANCE_RIP();
8810 IEM_MC_END();
8811 return VINF_SUCCESS;
8812
8813 case IEMMODE_64BIT:
8814 IEM_MC_BEGIN(3, 0);
8815 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8816 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
8817 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8818
8819 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8820 IEM_MC_REF_EFLAGS(pEFlags);
8821 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8822
8823 IEM_MC_ADVANCE_RIP();
8824 IEM_MC_END();
8825 return VINF_SUCCESS;
8826
8827 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8828 }
8829 }
8830 else
8831 {
8832 /* memory destination. */
8833
8834 uint32_t fAccess;
8835 if (pImpl->pfnLockedU16)
8836 fAccess = IEM_ACCESS_DATA_RW;
8837 else /* BT */
8838 fAccess = IEM_ACCESS_DATA_R;
8839
8840 /** @todo test negative bit offsets! */
8841 switch (pVCpu->iem.s.enmEffOpSize)
8842 {
8843 case IEMMODE_16BIT:
8844 IEM_MC_BEGIN(3, 1);
8845 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8846 IEM_MC_ARG(uint16_t, u16Src, 1);
8847 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8849
8850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8851 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8852 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
8853 if (pImpl->pfnLockedU16)
8854 IEMOP_HLP_DONE_DECODING();
8855 else
8856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8857 IEM_MC_FETCH_EFLAGS(EFlags);
8858 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8859 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8860 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8861 else
8862 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
8863 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
8864
8865 IEM_MC_COMMIT_EFLAGS(EFlags);
8866 IEM_MC_ADVANCE_RIP();
8867 IEM_MC_END();
8868 return VINF_SUCCESS;
8869
8870 case IEMMODE_32BIT:
8871 IEM_MC_BEGIN(3, 1);
8872 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8873 IEM_MC_ARG(uint32_t, u32Src, 1);
8874 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8876
8877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8878 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8879 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
8880 if (pImpl->pfnLockedU16)
8881 IEMOP_HLP_DONE_DECODING();
8882 else
8883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8884 IEM_MC_FETCH_EFLAGS(EFlags);
8885 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8886 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8887 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8888 else
8889 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
8890 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
8891
8892 IEM_MC_COMMIT_EFLAGS(EFlags);
8893 IEM_MC_ADVANCE_RIP();
8894 IEM_MC_END();
8895 return VINF_SUCCESS;
8896
8897 case IEMMODE_64BIT:
8898 IEM_MC_BEGIN(3, 1);
8899 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8900 IEM_MC_ARG(uint64_t, u64Src, 1);
8901 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8903
8904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8905 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8906 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
8907 if (pImpl->pfnLockedU16)
8908 IEMOP_HLP_DONE_DECODING();
8909 else
8910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8911 IEM_MC_FETCH_EFLAGS(EFlags);
8912 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8913 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8914 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8915 else
8916 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
8917 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
8918
8919 IEM_MC_COMMIT_EFLAGS(EFlags);
8920 IEM_MC_ADVANCE_RIP();
8921 IEM_MC_END();
8922 return VINF_SUCCESS;
8923
8924 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8925 }
8926 }
8927}
8928
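/*
 * Sketch of the semantics behind the BT family dispatched above for the
 * immediate forms, where the bit offset is masked to the operand width as
 * the 'u8Bit & 0x0f' lines show (16-bit BTS variant shown; BT omits the
 * write-back, BTR clears the bit and BTC toggles it; the name is made up):
 */
#if 0
static void iemSketchBtsU16(uint16_t *puDst, uint16_t uBit, uint32_t *pfEFlags)
{
    uint16_t const fMask = (uint16_t)1 << (uBit & 15);
    if (*puDst & fMask)
        *pfEFlags |= X86_EFL_CF;    /* CF receives the tested bit... */
    else
        *pfEFlags &= ~X86_EFL_CF;
    *puDst |= fMask;                /* ...and BTS then sets it. */
}
#endif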
8929
8930/** Opcode 0x0f 0xbb. */
8931FNIEMOP_DEF(iemOp_btc_Ev_Gv)
8932{
8933 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
8934 IEMOP_HLP_MIN_386();
8935 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
8936}
8937
8938
8939/**
8940 * Common worker for BSF and BSR instructions.
8941 *
8942 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
8943 * the destination register, which means that for 32-bit operations the high
8944 * bits must be left alone.
8945 *
8946 * @param pImpl Pointer to the instruction implementation (assembly).
8947 */
8948FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
8949{
8950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8951
8952 /*
8953 * If rm is denoting a register, no more instruction bytes.
8954 */
8955 if (IEM_IS_MODRM_REG_MODE(bRm))
8956 {
8957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8958 switch (pVCpu->iem.s.enmEffOpSize)
8959 {
8960 case IEMMODE_16BIT:
8961 IEM_MC_BEGIN(3, 0);
8962 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8963 IEM_MC_ARG(uint16_t, u16Src, 1);
8964 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8965
8966 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8967 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8968 IEM_MC_REF_EFLAGS(pEFlags);
8969 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8970
8971 IEM_MC_ADVANCE_RIP();
8972 IEM_MC_END();
8973 break;
8974
8975 case IEMMODE_32BIT:
8976 IEM_MC_BEGIN(3, 0);
8977 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8978 IEM_MC_ARG(uint32_t, u32Src, 1);
8979 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8980
8981 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8982 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8983 IEM_MC_REF_EFLAGS(pEFlags);
8984 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8985 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8986 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8987 IEM_MC_ENDIF();
8988 IEM_MC_ADVANCE_RIP();
8989 IEM_MC_END();
8990 break;
8991
8992 case IEMMODE_64BIT:
8993 IEM_MC_BEGIN(3, 0);
8994 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8995 IEM_MC_ARG(uint64_t, u64Src, 1);
8996 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8997
8998 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8999 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9000 IEM_MC_REF_EFLAGS(pEFlags);
9001 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9002
9003 IEM_MC_ADVANCE_RIP();
9004 IEM_MC_END();
9005 break;
9006 }
9007 }
9008 else
9009 {
9010 /*
9011 * We're accessing memory.
9012 */
9013 switch (pVCpu->iem.s.enmEffOpSize)
9014 {
9015 case IEMMODE_16BIT:
9016 IEM_MC_BEGIN(3, 1);
9017 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9018 IEM_MC_ARG(uint16_t, u16Src, 1);
9019 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9021
9022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9024 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9025 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9026 IEM_MC_REF_EFLAGS(pEFlags);
9027 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9028
9029 IEM_MC_ADVANCE_RIP();
9030 IEM_MC_END();
9031 break;
9032
9033 case IEMMODE_32BIT:
9034 IEM_MC_BEGIN(3, 1);
9035 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9036 IEM_MC_ARG(uint32_t, u32Src, 1);
9037 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9039
9040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9042 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9043 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9044 IEM_MC_REF_EFLAGS(pEFlags);
9045 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9046
9047 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9048 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9049 IEM_MC_ENDIF();
9050 IEM_MC_ADVANCE_RIP();
9051 IEM_MC_END();
9052 break;
9053
9054 case IEMMODE_64BIT:
9055 IEM_MC_BEGIN(3, 1);
9056 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9057 IEM_MC_ARG(uint64_t, u64Src, 1);
9058 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9060
9061 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9063 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9064 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9065 IEM_MC_REF_EFLAGS(pEFlags);
9066 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9067
9068 IEM_MC_ADVANCE_RIP();
9069 IEM_MC_END();
9070 break;
9071 }
9072 }
9073 return VINF_SUCCESS;
9074}
9075
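/*
 * Rough shape of a 32-bit BSF under the assumptions the worker above relies
 * on: a zero source sets ZF and leaves the destination untouched, which is
 * exactly why the generic rv_rm helper with its unconditional write-back
 * cannot be used here (sketch only, name made up):
 */
#if 0
static void iemSketchBsfU32(uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags)
{
    if (!uSrc)
        *pfEFlags |= X86_EFL_ZF;        /* ZF=1, destination left alone */
    else
    {
        *pfEFlags &= ~X86_EFL_ZF;
        uint32_t iBit = 0;
        while (!(uSrc & 1))             /* scan for the least significant set bit */
        {
            uSrc >>= 1;
            iBit++;
        }
        *puDst = iBit;
    }
}
#endif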
9076
9077/** Opcode 0x0f 0xbc. */
9078FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
9079{
9080 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
9081 IEMOP_HLP_MIN_386();
9082 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
9083 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
9084}
9085
9086
9087/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
9088FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
9089{
9090 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
9091 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
9092 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
9093
9094#ifndef TST_IEM_CHECK_MC
9095 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
9096 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
9097 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
9098 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
9099 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
9100 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
9101 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
9102 {
9103 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
9104 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
9105 };
9106#endif
9107 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
9108 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
9109 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
9110}
9111
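/*
 * Illustration of how TZCNT differs from the BSF it falls back to when BMI1
 * is absent: TZCNT is defined for a zero source (result = operand width,
 * CF=1) and always writes the destination, whereas BSF leaves it alone and
 * sets ZF. Sketch of the count itself, flags omitted, name made up:
 */
#if 0
static uint32_t iemSketchTzCntU32(uint32_t uSrc)
{
    uint32_t cZeros = 0;
    while (cZeros < 32 && !(uSrc & (UINT32_C(1) << cZeros)))
        cZeros++;                       /* yields 32 for a zero source */
    return cZeros;
}
#endif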
9112
9113/** Opcode 0x0f 0xbd. */
9114FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
9115{
9116 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
9117 IEMOP_HLP_MIN_386();
9118 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
9119 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
9120}
9121
9122
9123/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
9124FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
9125{
9126 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
9127 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
9128 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
9129
9130#ifndef TST_IEM_CHECK_MC
9131 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
9132 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
9133 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
9134 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
9135 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
9136 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
9137 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
9138 {
9139 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
9140 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
9141 };
9142#endif
9143 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
9144 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
9145 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
9146}
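

/*
 * Reference sketch, not part of the decoder: a minimal C model of the
 * TZCNT/LZCNT semantics implemented by the iemAImpl_*zcnt_* workers above.
 * The iemRef* names are made up for illustration. Both instructions set CF
 * when the source is zero (the result is then the operand width) and ZF when
 * the result is zero; OF/SF/AF/PF are left undefined, matching the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS calls above.
 */
#if 0 /* illustrative only */
static uint32_t iemRefTzCntU32(uint32_t uSrc, uint32_t *pfEFlags)
{
    uint32_t cBits = 0;
    while (cBits < 32 && !(uSrc & RT_BIT_32(cBits)))
        cBits++;                                /* trailing zero count; 32 if uSrc == 0 */
    *pfEFlags &= ~(uint32_t)(X86_EFL_CF | X86_EFL_ZF);
    *pfEFlags |= (uSrc == 0 ? X86_EFL_CF : 0) | (cBits == 0 ? X86_EFL_ZF : 0);
    return cBits;
}

static uint32_t iemRefLzCntU32(uint32_t uSrc, uint32_t *pfEFlags)
{
    uint32_t cBits = 0;
    while (cBits < 32 && !(uSrc & RT_BIT_32(31 - cBits)))
        cBits++;                                /* leading zero count; 32 if uSrc == 0 */
    *pfEFlags &= ~(uint32_t)(X86_EFL_CF | X86_EFL_ZF);
    *pfEFlags |= (uSrc == 0 ? X86_EFL_CF : 0) | (cBits == 0 ? X86_EFL_ZF : 0);
    return cBits;
}
#endif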
9147
9148
9149
9150/** Opcode 0x0f 0xbe. */
9151FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
9152{
9153 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
9154 IEMOP_HLP_MIN_386();
9155
9156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9157
9158 /*
9159 * If rm is denoting a register, no more instruction bytes.
9160 */
9161 if (IEM_IS_MODRM_REG_MODE(bRm))
9162 {
9163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9164 switch (pVCpu->iem.s.enmEffOpSize)
9165 {
9166 case IEMMODE_16BIT:
9167 IEM_MC_BEGIN(0, 1);
9168 IEM_MC_LOCAL(uint16_t, u16Value);
9169 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9170 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
9171 IEM_MC_ADVANCE_RIP();
9172 IEM_MC_END();
9173 return VINF_SUCCESS;
9174
9175 case IEMMODE_32BIT:
9176 IEM_MC_BEGIN(0, 1);
9177 IEM_MC_LOCAL(uint32_t, u32Value);
9178 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9179 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9180 IEM_MC_ADVANCE_RIP();
9181 IEM_MC_END();
9182 return VINF_SUCCESS;
9183
9184 case IEMMODE_64BIT:
9185 IEM_MC_BEGIN(0, 1);
9186 IEM_MC_LOCAL(uint64_t, u64Value);
9187 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9188 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9189 IEM_MC_ADVANCE_RIP();
9190 IEM_MC_END();
9191 return VINF_SUCCESS;
9192
9193 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9194 }
9195 }
9196 else
9197 {
9198 /*
9199 * We're loading a register from memory.
9200 */
9201 switch (pVCpu->iem.s.enmEffOpSize)
9202 {
9203 case IEMMODE_16BIT:
9204 IEM_MC_BEGIN(0, 2);
9205 IEM_MC_LOCAL(uint16_t, u16Value);
9206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9209 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9210 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
9211 IEM_MC_ADVANCE_RIP();
9212 IEM_MC_END();
9213 return VINF_SUCCESS;
9214
9215 case IEMMODE_32BIT:
9216 IEM_MC_BEGIN(0, 2);
9217 IEM_MC_LOCAL(uint32_t, u32Value);
9218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9221 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9222 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9223 IEM_MC_ADVANCE_RIP();
9224 IEM_MC_END();
9225 return VINF_SUCCESS;
9226
9227 case IEMMODE_64BIT:
9228 IEM_MC_BEGIN(0, 2);
9229 IEM_MC_LOCAL(uint64_t, u64Value);
9230 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9233 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9234 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9235 IEM_MC_ADVANCE_RIP();
9236 IEM_MC_END();
9237 return VINF_SUCCESS;
9238
9239 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9240 }
9241 }
9242}
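

/*
 * Reference sketch, not part of the decoder: the sign extension movsx Gv,Eb
 * performs for each effective operand size, in plain C (hypothetical names).
 * As elsewhere, a 32-bit destination write implicitly zeroes bits 63:32 of
 * the general register.
 */
#if 0 /* illustrative only */
static uint16_t iemRefMovsxU8ToU16(uint8_t uSrc) { return (uint16_t)(int16_t)(int8_t)uSrc; }
static uint32_t iemRefMovsxU8ToU32(uint8_t uSrc) { return (uint32_t)(int32_t)(int8_t)uSrc; }
static uint64_t iemRefMovsxU8ToU64(uint8_t uSrc) { return (uint64_t)(int64_t)(int8_t)uSrc; } /* 0x80 -> 0xffffffffffffff80 */
#endif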
9243
9244
9245/** Opcode 0x0f 0xbf. */
9246FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
9247{
9248 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
9249 IEMOP_HLP_MIN_386();
9250
9251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9252
9253 /** @todo Not entirely sure how the operand size prefix is handled here,
9254 * assuming that it will be ignored. Would be nice to have a few
9255 * tests for this. */
9256 /*
9257 * If rm is denoting a register, no more instruction bytes.
9258 */
9259 if (IEM_IS_MODRM_REG_MODE(bRm))
9260 {
9261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9262 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
9263 {
9264 IEM_MC_BEGIN(0, 1);
9265 IEM_MC_LOCAL(uint32_t, u32Value);
9266 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9267 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9268 IEM_MC_ADVANCE_RIP();
9269 IEM_MC_END();
9270 }
9271 else
9272 {
9273 IEM_MC_BEGIN(0, 1);
9274 IEM_MC_LOCAL(uint64_t, u64Value);
9275 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9276 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9277 IEM_MC_ADVANCE_RIP();
9278 IEM_MC_END();
9279 }
9280 }
9281 else
9282 {
9283 /*
9284 * We're loading a register from memory.
9285 */
9286 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
9287 {
9288 IEM_MC_BEGIN(0, 2);
9289 IEM_MC_LOCAL(uint32_t, u32Value);
9290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9293 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9294 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9295 IEM_MC_ADVANCE_RIP();
9296 IEM_MC_END();
9297 }
9298 else
9299 {
9300 IEM_MC_BEGIN(0, 2);
9301 IEM_MC_LOCAL(uint64_t, u64Value);
9302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9305 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9306 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9307 IEM_MC_ADVANCE_RIP();
9308 IEM_MC_END();
9309 }
9310 }
9311 return VINF_SUCCESS;
9312}
9313
9314
9315/** Opcode 0x0f 0xc0. */
9316FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
9317{
9318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9319 IEMOP_HLP_MIN_486();
9320 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
9321
9322 /*
9323 * If rm is denoting a register, no more instruction bytes.
9324 */
9325 if (IEM_IS_MODRM_REG_MODE(bRm))
9326 {
9327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9328
9329 IEM_MC_BEGIN(3, 0);
9330 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9331 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
9332 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9333
9334 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9335 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9336 IEM_MC_REF_EFLAGS(pEFlags);
9337 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
9338
9339 IEM_MC_ADVANCE_RIP();
9340 IEM_MC_END();
9341 }
9342 else
9343 {
9344 /*
9345 * We're accessing memory.
9346 */
9347 IEM_MC_BEGIN(3, 3);
9348 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9349 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
9350 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9351 IEM_MC_LOCAL(uint8_t, u8RegCopy);
9352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9353
9354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9355 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9356 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9357 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
9358 IEM_MC_FETCH_EFLAGS(EFlags);
9359 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9360 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
9361 else
9362 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
9363
9364 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9365 IEM_MC_COMMIT_EFLAGS(EFlags);
9366 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
9367 IEM_MC_ADVANCE_RIP();
9368 IEM_MC_END();
9369 return VINF_SUCCESS;
9370 }
9371 return VINF_SUCCESS;
9372}
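

/*
 * Reference sketch, not part of the decoder: the exchange-and-add the
 * iemAImpl_xadd_u8 worker performs (hypothetical name below). The destination
 * receives the sum and the source register receives the old destination
 * value; EFLAGS are updated as for an ADD of the two original operands
 * (arithmetic flag calculation omitted here).
 */
#if 0 /* illustrative only */
static void iemRefXAddU8(uint8_t *puDst, uint8_t *puReg)
{
    uint8_t const uOldDst = *puDst;
    *puDst = (uint8_t)(uOldDst + *puReg);       /* destination <- dst + src */
    *puReg = uOldDst;                           /* source register <- old destination */
}
#endif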
9373
9374
9375/** Opcode 0x0f 0xc1. */
9376FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
9377{
9378 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
9379 IEMOP_HLP_MIN_486();
9380 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9381
9382 /*
9383 * If rm is denoting a register, no more instruction bytes.
9384 */
9385 if (IEM_IS_MODRM_REG_MODE(bRm))
9386 {
9387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9388
9389 switch (pVCpu->iem.s.enmEffOpSize)
9390 {
9391 case IEMMODE_16BIT:
9392 IEM_MC_BEGIN(3, 0);
9393 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9394 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
9395 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9396
9397 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9398 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9399 IEM_MC_REF_EFLAGS(pEFlags);
9400 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
9401
9402 IEM_MC_ADVANCE_RIP();
9403 IEM_MC_END();
9404 return VINF_SUCCESS;
9405
9406 case IEMMODE_32BIT:
9407 IEM_MC_BEGIN(3, 0);
9408 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9409 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
9410 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9411
9412 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9413 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9414 IEM_MC_REF_EFLAGS(pEFlags);
9415 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
9416
9417 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9418 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
9419 IEM_MC_ADVANCE_RIP();
9420 IEM_MC_END();
9421 return VINF_SUCCESS;
9422
9423 case IEMMODE_64BIT:
9424 IEM_MC_BEGIN(3, 0);
9425 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9426 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
9427 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9428
9429 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9430 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9431 IEM_MC_REF_EFLAGS(pEFlags);
9432 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
9433
9434 IEM_MC_ADVANCE_RIP();
9435 IEM_MC_END();
9436 return VINF_SUCCESS;
9437
9438 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9439 }
9440 }
9441 else
9442 {
9443 /*
9444 * We're accessing memory.
9445 */
9446 switch (pVCpu->iem.s.enmEffOpSize)
9447 {
9448 case IEMMODE_16BIT:
9449 IEM_MC_BEGIN(3, 3);
9450 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9451 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
9452 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9453 IEM_MC_LOCAL(uint16_t, u16RegCopy);
9454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9455
9456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9457 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9458 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9459 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
9460 IEM_MC_FETCH_EFLAGS(EFlags);
9461 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9462 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
9463 else
9464 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
9465
9466 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9467 IEM_MC_COMMIT_EFLAGS(EFlags);
9468 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
9469 IEM_MC_ADVANCE_RIP();
9470 IEM_MC_END();
9471 return VINF_SUCCESS;
9472
9473 case IEMMODE_32BIT:
9474 IEM_MC_BEGIN(3, 3);
9475 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9476 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
9477 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9478 IEM_MC_LOCAL(uint32_t, u32RegCopy);
9479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9480
9481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9482 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9483 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9484 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
9485 IEM_MC_FETCH_EFLAGS(EFlags);
9486 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9487 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
9488 else
9489 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
9490
9491 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9492 IEM_MC_COMMIT_EFLAGS(EFlags);
9493 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
9494 IEM_MC_ADVANCE_RIP();
9495 IEM_MC_END();
9496 return VINF_SUCCESS;
9497
9498 case IEMMODE_64BIT:
9499 IEM_MC_BEGIN(3, 3);
9500 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9501 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
9502 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9503 IEM_MC_LOCAL(uint64_t, u64RegCopy);
9504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9505
9506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9507 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9508 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9509 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
9510 IEM_MC_FETCH_EFLAGS(EFlags);
9511 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9512 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
9513 else
9514 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
9515
9516 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9517 IEM_MC_COMMIT_EFLAGS(EFlags);
9518 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
9519 IEM_MC_ADVANCE_RIP();
9520 IEM_MC_END();
9521 return VINF_SUCCESS;
9522
9523 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9524 }
9525 }
9526}
9527
9528
9529/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
9530FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
9531/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
9532FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
9533/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
9534FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
9535/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
9536FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
9537
9538
9539/** Opcode 0x0f 0xc3. */
9540FNIEMOP_DEF(iemOp_movnti_My_Gy)
9541{
9542 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
9543
9544 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9545
9546 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
9547 if (IEM_IS_MODRM_MEM_MODE(bRm))
9548 {
9549 switch (pVCpu->iem.s.enmEffOpSize)
9550 {
9551 case IEMMODE_32BIT:
9552 IEM_MC_BEGIN(0, 2);
9553 IEM_MC_LOCAL(uint32_t, u32Value);
9554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9555
9556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9558 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9559 return IEMOP_RAISE_INVALID_OPCODE();
9560
9561 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
9562 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
9563 IEM_MC_ADVANCE_RIP();
9564 IEM_MC_END();
9565 break;
9566
9567 case IEMMODE_64BIT:
9568 IEM_MC_BEGIN(0, 2);
9569 IEM_MC_LOCAL(uint64_t, u64Value);
9570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9571
9572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9574 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9575 return IEMOP_RAISE_INVALID_OPCODE();
9576
9577 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
9578 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
9579 IEM_MC_ADVANCE_RIP();
9580 IEM_MC_END();
9581 break;
9582
9583 case IEMMODE_16BIT:
9584 /** @todo check this form. */
9585 return IEMOP_RAISE_INVALID_OPCODE();
9586 }
9587 }
9588 else
9589 return IEMOP_RAISE_INVALID_OPCODE();
9590 return VINF_SUCCESS;
9591}
9592/* Opcode 0x66 0x0f 0xc3 - invalid */
9593/* Opcode 0xf3 0x0f 0xc3 - invalid */
9594/* Opcode 0xf2 0x0f 0xc3 - invalid */
9595
9596/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
9597FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
9598/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
9599FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
9600/* Opcode 0xf3 0x0f 0xc4 - invalid */
9601/* Opcode 0xf2 0x0f 0xc4 - invalid */
9602
9603/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
9604FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
9605/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
9606FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
9607/* Opcode 0xf3 0x0f 0xc5 - invalid */
9608/* Opcode 0xf2 0x0f 0xc5 - invalid */
9609
9610/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
9611FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
9612/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
9613FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
9614/* Opcode 0xf3 0x0f 0xc6 - invalid */
9615/* Opcode 0xf2 0x0f 0xc6 - invalid */
9616
9617
9618/** Opcode 0x0f 0xc7 !11/1. */
9619FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
9620{
9621 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
9622
9623 IEM_MC_BEGIN(4, 3);
9624 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
9625 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
9626 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
9627 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
9628 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
9629 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
9630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9631
9632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9633 IEMOP_HLP_DONE_DECODING();
9634 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9635
9636 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
9637 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
9638 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
9639
9640 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
9641 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
9642 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
9643
9644 IEM_MC_FETCH_EFLAGS(EFlags);
9645 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9646 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
9647 else
9648 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
9649
9650 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
9651 IEM_MC_COMMIT_EFLAGS(EFlags);
9652 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9653 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
9654 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
9655 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
9656 IEM_MC_ENDIF();
9657 IEM_MC_ADVANCE_RIP();
9658
9659 IEM_MC_END();
9660 return VINF_SUCCESS;
9661}
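

/*
 * Reference sketch, not part of the decoder: the compare-and-exchange the
 * iemAImpl_cmpxchg8b worker performs on the mapped memory operand
 * (hypothetical name below). On a match ZF is set and ECX:EBX is stored to
 * memory; on a mismatch ZF is cleared and the memory value is loaded back
 * into EDX:EAX via the pu64EaxEdx reference, as committed above.
 */
#if 0 /* illustrative only */
static void iemRefCmpXchg8b(uint64_t *pu64MemDst, RTUINT64U *pu64EaxEdx,
                            RTUINT64U const *pu64EbxEcx, uint32_t *pfEFlags)
{
    if (*pu64MemDst == pu64EaxEdx->u)
    {
        *pu64MemDst = pu64EbxEcx->u;            /* equal: store ECX:EBX */
        *pfEFlags |= X86_EFL_ZF;
    }
    else
    {
        pu64EaxEdx->u = *pu64MemDst;            /* not equal: load memory into EDX:EAX */
        *pfEFlags &= ~(uint32_t)X86_EFL_ZF;
    }
}
#endif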
9662
9663
9664/** Opcode REX.W 0x0f 0xc7 !11/1. */
9665FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
9666{
9667 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
9668 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
9669 {
9670#if 0
9671 RT_NOREF(bRm);
9672 IEMOP_BITCH_ABOUT_STUB();
9673 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9674#else
9675 IEM_MC_BEGIN(4, 3);
9676 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
9677 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
9678 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
9679 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
9680 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
9681 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
9682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9683
9684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9685 IEMOP_HLP_DONE_DECODING();
9686 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
9687 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9688
9689 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
9690 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
9691 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
9692
9693 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
9694 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
9695 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
9696
9697 IEM_MC_FETCH_EFLAGS(EFlags);
9698# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
9699# if defined(RT_ARCH_AMD64)
9700 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
9701# endif
9702 {
9703 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9704 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9705 else
9706 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9707 }
9708# if defined(RT_ARCH_AMD64)
9709 else
9710# endif
9711# endif
9712# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
9713 {
9714 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
9715 accesses that are not all atomic, which works fine in a UNI CPU guest
9716 configuration (ignoring DMA). If guest SMP is active we have no choice
9717 but to use a rendezvous callback here. Sigh. */
9718 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
9719 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9720 else
9721 {
9722 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9723 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
9724 }
9725 }
9726# endif
9727
9728 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
9729 IEM_MC_COMMIT_EFLAGS(EFlags);
9730 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9731 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
9732 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
9733 IEM_MC_ENDIF();
9734 IEM_MC_ADVANCE_RIP();
9735
9736 IEM_MC_END();
9737 return VINF_SUCCESS;
9738#endif
9739 }
9740 Log(("cmpxchg16b -> #UD\n"));
9741 return IEMOP_RAISE_INVALID_OPCODE();
9742}
9743
9744FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
9745{
9746 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
9747 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
9748 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
9749}
9750
9751/** Opcode 0x0f 0xc7 11/6. */
9752FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
9753
9754/** Opcode 0x0f 0xc7 !11/6. */
9755#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9756FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
9757{
9758 IEMOP_MNEMONIC(vmptrld, "vmptrld");
9759 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
9760 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
9761 IEM_MC_BEGIN(2, 0);
9762 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9763 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
9764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9765 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
9766 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9767 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
9768 IEM_MC_END();
9769 return VINF_SUCCESS;
9770}
9771#else
9772FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
9773#endif
9774
9775/** Opcode 0x66 0x0f 0xc7 !11/6. */
9776#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9777FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
9778{
9779 IEMOP_MNEMONIC(vmclear, "vmclear");
9780 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
9781 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
9782 IEM_MC_BEGIN(2, 0);
9783 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9784 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
9785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9786 IEMOP_HLP_DONE_DECODING();
9787 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9788 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
9789 IEM_MC_END();
9790 return VINF_SUCCESS;
9791}
9792#else
9793FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
9794#endif
9795
9796/** Opcode 0xf3 0x0f 0xc7 !11/6. */
9797#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9798FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
9799{
9800 IEMOP_MNEMONIC(vmxon, "vmxon");
9801 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
9802 IEM_MC_BEGIN(2, 0);
9803 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9804 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
9805 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9806 IEMOP_HLP_DONE_DECODING();
9807 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9808 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
9809 IEM_MC_END();
9810 return VINF_SUCCESS;
9811}
9812#else
9813FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
9814#endif
9815
9816/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
9817#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9818FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
9819{
9820 IEMOP_MNEMONIC(vmptrst, "vmptrst");
9821 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
9822 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
9823 IEM_MC_BEGIN(2, 0);
9824 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9825 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
9826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9827 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
9828 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9829 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
9830 IEM_MC_END();
9831 return VINF_SUCCESS;
9832}
9833#else
9834FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
9835#endif
9836
9837/** Opcode 0x0f 0xc7 11/7. */
9838FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
9839
9840
9841/**
9842 * Group 9 jump table for register variant.
9843 */
9844IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
9845{ /* pfx: none, 066h, 0f3h, 0f2h */
9846 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9847 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
9848 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9849 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9850 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9851 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9852 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9853 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9854};
9855AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
9856
9857
9858/**
9859 * Group 9 jump table for memory variant.
9860 */
9861IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
9862{ /* pfx: none, 066h, 0f3h, 0f2h */
9863 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9864 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
9865 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9866 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9867 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9868 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9869 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
9870 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9871};
9872AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
9873
9874
9875/** Opcode 0x0f 0xc7. */
9876FNIEMOP_DEF(iemOp_Grp9)
9877{
9878 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
9879 if (IEM_IS_MODRM_REG_MODE(bRm))
9880 /* register, register */
9881 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9882 + pVCpu->iem.s.idxPrefix], bRm);
9883 /* memory, register */
9884 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9885 + pVCpu->iem.s.idxPrefix], bRm);
9886}
9887
9888
9889/**
9890 * Common 'bswap register' helper.
9891 */
9892FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
9893{
9894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9895 switch (pVCpu->iem.s.enmEffOpSize)
9896 {
9897 case IEMMODE_16BIT:
9898 IEM_MC_BEGIN(1, 0);
9899 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9900 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
9901 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
9902 IEM_MC_ADVANCE_RIP();
9903 IEM_MC_END();
9904 return VINF_SUCCESS;
9905
9906 case IEMMODE_32BIT:
9907 IEM_MC_BEGIN(1, 0);
9908 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9909 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9910 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9911 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
9912 IEM_MC_ADVANCE_RIP();
9913 IEM_MC_END();
9914 return VINF_SUCCESS;
9915
9916 case IEMMODE_64BIT:
9917 IEM_MC_BEGIN(1, 0);
9918 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9919 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9920 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
9921 IEM_MC_ADVANCE_RIP();
9922 IEM_MC_END();
9923 return VINF_SUCCESS;
9924
9925 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9926 }
9927}
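

/*
 * Reference sketch, not part of the decoder: the byte reversal performed by
 * the iemAImpl_bswap_u32/u64 workers (hypothetical name below). Note that
 * BSWAP with a 16-bit operand size is undefined on real hardware, which is
 * why the 16-bit case above goes through a 32-bit register reference so the
 * emulation controls the whole dword.
 */
#if 0 /* illustrative only */
static uint32_t iemRefBSwapU32(uint32_t u)
{
    return (u >> 24)
         | ((u >>  8) & UINT32_C(0x0000ff00))
         | ((u <<  8) & UINT32_C(0x00ff0000))
         | (u << 24);
}
#endif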
9928
9929
9930/** Opcode 0x0f 0xc8. */
9931FNIEMOP_DEF(iemOp_bswap_rAX_r8)
9932{
9933 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
9934 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
9935 prefix, but REX.B appears to be the correct prefix. For a parallel
9936 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
9937 IEMOP_HLP_MIN_486();
9938 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
9939}
9940
9941
9942/** Opcode 0x0f 0xc9. */
9943FNIEMOP_DEF(iemOp_bswap_rCX_r9)
9944{
9945 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
9946 IEMOP_HLP_MIN_486();
9947 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
9948}
9949
9950
9951/** Opcode 0x0f 0xca. */
9952FNIEMOP_DEF(iemOp_bswap_rDX_r10)
9953{
9954 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
9955 IEMOP_HLP_MIN_486();
9956 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
9957}
9958
9959
9960/** Opcode 0x0f 0xcb. */
9961FNIEMOP_DEF(iemOp_bswap_rBX_r11)
9962{
9963 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
9964 IEMOP_HLP_MIN_486();
9965 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
9966}
9967
9968
9969/** Opcode 0x0f 0xcc. */
9970FNIEMOP_DEF(iemOp_bswap_rSP_r12)
9971{
9972 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
9973 IEMOP_HLP_MIN_486();
9974 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
9975}
9976
9977
9978/** Opcode 0x0f 0xcd. */
9979FNIEMOP_DEF(iemOp_bswap_rBP_r13)
9980{
9981 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
9982 IEMOP_HLP_MIN_486();
9983 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
9984}
9985
9986
9987/** Opcode 0x0f 0xce. */
9988FNIEMOP_DEF(iemOp_bswap_rSI_r14)
9989{
9990 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
9991 IEMOP_HLP_MIN_486();
9992 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
9993}
9994
9995
9996/** Opcode 0x0f 0xcf. */
9997FNIEMOP_DEF(iemOp_bswap_rDI_r15)
9998{
9999 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
10000 IEMOP_HLP_MIN_486();
10001 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
10002}
10003
10004
10005/* Opcode 0x0f 0xd0 - invalid */
10006/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
10007FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
10008/* Opcode 0xf3 0x0f 0xd0 - invalid */
10009/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
10010FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
10011
10012/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
10013FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
10014{
10015 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10016 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
10017}
10018
10019/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
10020FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
10021{
10022 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10023 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
10024}
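

/*
 * Reference sketch, not part of the decoder: the per-lane logical right shift
 * the psrlw workers perform (hypothetical name below). The shift count comes
 * from the full source operand, and counts above 15 clear every 16-bit lane
 * rather than wrapping.
 */
#if 0 /* illustrative only */
static void iemRefPsrlwU64(uint64_t *puDst, uint64_t uCount)
{
    RTUINT64U uSrc, uRes;
    uSrc.u = *puDst;
    for (unsigned iLane = 0; iLane < 4; iLane++)
        uRes.au16[iLane] = uCount <= 15 ? (uint16_t)(uSrc.au16[iLane] >> uCount) : 0;
    *puDst = uRes.u;
}
#endif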
10025
10026/* Opcode 0xf3 0x0f 0xd1 - invalid */
10027/* Opcode 0xf2 0x0f 0xd1 - invalid */
10028
10029/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
10030FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
10031{
10032 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10033 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
10034}
10035
10036
10037/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
10038FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
10039{
10040 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10041 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
10042}
10043
10044
10045/* Opcode 0xf3 0x0f 0xd2 - invalid */
10046/* Opcode 0xf2 0x0f 0xd2 - invalid */
10047
10048/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
10049FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
10050{
10051 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10052 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
10053}
10054
10055
10056/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
10057FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
10058{
10059 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10060 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
10061}
10062
10063
10064/* Opcode 0xf3 0x0f 0xd3 - invalid */
10065/* Opcode 0xf2 0x0f 0xd3 - invalid */
10066
10067
10068/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
10069FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
10070{
10071 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10072 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
10073}
10074
10075
10076/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
10077FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
10078{
10079 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10080 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
10081}
10082
10083
10084/* Opcode 0xf3 0x0f 0xd4 - invalid */
10085/* Opcode 0xf2 0x0f 0xd4 - invalid */
10086
10087/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
10088FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
10089{
10090 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10091 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
10092}
10093
10094/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
10095FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
10096{
10097 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10098 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
10099}
10100
10101
10102/* Opcode 0xf3 0x0f 0xd5 - invalid */
10103/* Opcode 0xf2 0x0f 0xd5 - invalid */
10104
10105/* Opcode 0x0f 0xd6 - invalid */
10106
10107/**
10108 * @opcode 0xd6
10109 * @oppfx 0x66
10110 * @opcpuid sse2
10111 * @opgroup og_sse2_pcksclr_datamove
10112 * @opxcpttype none
10113 * @optest op1=-1 op2=2 -> op1=2
10114 * @optest op1=0 op2=-42 -> op1=-42
10115 */
10116FNIEMOP_DEF(iemOp_movq_Wq_Vq)
10117{
10118 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10120 if (IEM_IS_MODRM_REG_MODE(bRm))
10121 {
10122 /*
10123 * Register, register.
10124 */
10125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10126 IEM_MC_BEGIN(0, 2);
10127 IEM_MC_LOCAL(uint64_t, uSrc);
10128
10129 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10130 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
10131
10132 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10133 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
10134
10135 IEM_MC_ADVANCE_RIP();
10136 IEM_MC_END();
10137 }
10138 else
10139 {
10140 /*
10141 * Memory, register.
10142 */
10143 IEM_MC_BEGIN(0, 2);
10144 IEM_MC_LOCAL(uint64_t, uSrc);
10145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10146
10147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10149 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10150 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10151
10152 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10153 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10154
10155 IEM_MC_ADVANCE_RIP();
10156 IEM_MC_END();
10157 }
10158 return VINF_SUCCESS;
10159}
10160
10161
10162/**
10163 * @opcode 0xd6
10164 * @opcodesub 11 mr/reg
10165 * @oppfx f3
10166 * @opcpuid sse2
10167 * @opgroup og_sse2_simdint_datamove
10168 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
10169 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10170 */
10171FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
10172{
10173 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10174 if (IEM_IS_MODRM_REG_MODE(bRm))
10175 {
10176 /*
10177 * Register, register.
10178 */
10179 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10181 IEM_MC_BEGIN(0, 1);
10182 IEM_MC_LOCAL(uint64_t, uSrc);
10183
10184 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10185 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10186
10187 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
10188 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
10189 IEM_MC_FPU_TO_MMX_MODE();
10190
10191 IEM_MC_ADVANCE_RIP();
10192 IEM_MC_END();
10193 return VINF_SUCCESS;
10194 }
10195
10196 /**
10197 * @opdone
10198 * @opmnemonic udf30fd6mem
10199 * @opcode 0xd6
10200 * @opcodesub !11 mr/reg
10201 * @oppfx f3
10202 * @opunused intel-modrm
10203 * @opcpuid sse
10204 * @optest ->
10205 */
10206 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
10207}
10208
10209
10210/**
10211 * @opcode 0xd6
10212 * @opcodesub 11 mr/reg
10213 * @oppfx f2
10214 * @opcpuid sse2
10215 * @opgroup og_sse2_simdint_datamove
10216 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
10217 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10218 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
10219 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
10220 * @optest op1=-42 op2=0xfedcba9876543210
10221 * -> op1=0xfedcba9876543210 ftw=0xff
10222 */
10223FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
10224{
10225 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10226 if (IEM_IS_MODRM_REG_MODE(bRm))
10227 {
10228 /*
10229 * Register, register.
10230 */
10231 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10233 IEM_MC_BEGIN(0, 1);
10234 IEM_MC_LOCAL(uint64_t, uSrc);
10235
10236 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10237 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10238
10239 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
10240 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
10241 IEM_MC_FPU_TO_MMX_MODE();
10242
10243 IEM_MC_ADVANCE_RIP();
10244 IEM_MC_END();
10245 return VINF_SUCCESS;
10246 }
10247
10248 /**
10249 * @opdone
10250 * @opmnemonic udf20fd6mem
10251 * @opcode 0xd6
10252 * @opcodesub !11 mr/reg
10253 * @oppfx f2
10254 * @opunused intel-modrm
10255 * @opcpuid sse
10256 * @optest ->
10257 */
10258 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
10259}
10260
10261
10262/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
10263FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
10264{
10265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10266 /* Docs says register only. */
10267 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
10268 {
10269 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
10270 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
10271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10272 IEM_MC_BEGIN(2, 0);
10273 IEM_MC_ARG(uint64_t *, puDst, 0);
10274 IEM_MC_ARG(uint64_t const *, puSrc, 1);
10275 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
10276 IEM_MC_PREPARE_FPU_USAGE();
10277 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
10278 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
10279 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
10280 IEM_MC_FPU_TO_MMX_MODE();
10281 IEM_MC_ADVANCE_RIP();
10282 IEM_MC_END();
10283 return VINF_SUCCESS;
10284 }
10285 return IEMOP_RAISE_INVALID_OPCODE();
10286}
10287
10288
10289/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
10290FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
10291{
10292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10293 /* Docs says register only. */
10294 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
10295 {
10296 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
10297 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
10298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10299 IEM_MC_BEGIN(2, 0);
10300 IEM_MC_ARG(uint64_t *, puDst, 0);
10301 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
10302 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10303 IEM_MC_PREPARE_SSE_USAGE();
10304 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
10305 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
10306 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
10307 IEM_MC_ADVANCE_RIP();
10308 IEM_MC_END();
10309 return VINF_SUCCESS;
10310 }
10311 return IEMOP_RAISE_INVALID_OPCODE();
10312}
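

/*
 * Reference sketch, not part of the decoder: the mask gathering the
 * iemAImpl_pmovmskb_u64 worker performs (hypothetical name below). The sign
 * bit of each source byte becomes one bit of the destination and all higher
 * destination bits are cleared, which is why the lazy 64-bit GREG reference
 * above is safe.
 */
#if 0 /* illustrative only */
static void iemRefPMovMskBU64(uint64_t *puDst, uint64_t const *puSrc)
{
    uint64_t const uSrc  = *puSrc;
    uint64_t       fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    *puDst = fMask;
}
#endif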
10313
10314
10315/* Opcode 0xf3 0x0f 0xd7 - invalid */
10316/* Opcode 0xf2 0x0f 0xd7 - invalid */
10317
10318
10319/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
10320FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
10321{
10322 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10323 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
10324}
10325
10326
10327/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
10328FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
10329{
10330 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10331 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
10332}
10333
10334
10335/* Opcode 0xf3 0x0f 0xd8 - invalid */
10336/* Opcode 0xf2 0x0f 0xd8 - invalid */
10337
10338/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
10339FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
10340{
10341 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10342 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
10343}
10344
10345
10346/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
10347FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
10348{
10349 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10350 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
10351}
10352
10353
10354/* Opcode 0xf3 0x0f 0xd9 - invalid */
10355/* Opcode 0xf2 0x0f 0xd9 - invalid */
10356
10357/** Opcode 0x0f 0xda - pminub Pq, Qq */
10358FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
10359{
10360 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10361 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
10362}
10363
10364
10365/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
10366FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
10367{
10368 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10369 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
10370}
10371
10372/* Opcode 0xf3 0x0f 0xda - invalid */
10373/* Opcode 0xf2 0x0f 0xda - invalid */
10374
10375/** Opcode 0x0f 0xdb - pand Pq, Qq */
10376FNIEMOP_DEF(iemOp_pand_Pq_Qq)
10377{
10378 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10379 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
10380}
10381
10382
10383/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
10384FNIEMOP_DEF(iemOp_pand_Vx_Wx)
10385{
10386 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10387 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
10388}
10389
10390
10391/* Opcode 0xf3 0x0f 0xdb - invalid */
10392/* Opcode 0xf2 0x0f 0xdb - invalid */
10393
10394/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
10395FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
10396{
10397 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10398 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
10399}
10400
10401
10402/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
10403FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
10404{
10405 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10406 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
10407}
10408
10409
10410/* Opcode 0xf3 0x0f 0xdc - invalid */
10411/* Opcode 0xf2 0x0f 0xdc - invalid */
10412
10413/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
10414FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
10415{
10416 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10417 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
10418}
10419
10420
10421/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
10422FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
10423{
10424 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10425 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
10426}
10427
10428
10429/* Opcode 0xf3 0x0f 0xdd - invalid */
10430/* Opcode 0xf2 0x0f 0xdd - invalid */
10431
10432/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
10433FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
10434{
10435 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10436 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
10437}
10438
10439
10440/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
10441FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
10442{
10443 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10444 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
10445}
10446
10447/* Opcode 0xf3 0x0f 0xde - invalid */
10448/* Opcode 0xf2 0x0f 0xde - invalid */
10449
10450
10451/** Opcode 0x0f 0xdf - pandn Pq, Qq */
10452FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
10453{
10454 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10455 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
10456}
10457
10458
10459/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
10460FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
10461{
10462 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10463 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
10464}
10465
10466
10467/* Opcode 0xf3 0x0f 0xdf - invalid */
10468/* Opcode 0xf2 0x0f 0xdf - invalid */
10469
10470/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
10471FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
10472{
10473 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10474 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
10475}
10476
10477
10478/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
10479FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
10480{
10481 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10482 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
10483}
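

/*
 * Reference sketch, not part of the decoder: the rounding unsigned average
 * the pavgb workers apply to each byte lane (hypothetical name below):
 * (a + b + 1) / 2, computed in a wider type so the intermediate sum cannot
 * overflow.
 */
#if 0 /* illustrative only */
static uint8_t iemRefPAvgB(uint8_t uByte1, uint8_t uByte2)
{
    return (uint8_t)(((uint16_t)uByte1 + uByte2 + 1) >> 1);
}
#endif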
10484
10485
10486/* Opcode 0xf3 0x0f 0xe0 - invalid */
10487/* Opcode 0xf2 0x0f 0xe0 - invalid */
10488
10489/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
10490FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
10491{
10492 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10493 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
10494}
10495
10496
10497/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
10498FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
10499{
10500 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10501 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
10502}
10503
10504
10505/* Opcode 0xf3 0x0f 0xe1 - invalid */
10506/* Opcode 0xf2 0x0f 0xe1 - invalid */
10507
10508/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
10509FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
10510{
10511 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10512 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
10513}
10514
10515
10516/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
10517FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
10518{
10519 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10520 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
10521}
10522
10523
10524/* Opcode 0xf3 0x0f 0xe2 - invalid */
10525/* Opcode 0xf2 0x0f 0xe2 - invalid */
10526
10527/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
10528FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
10529{
10530 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10531 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
10532}
10533
10534
10535/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
10536FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
10537{
10538 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10539 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
10540}
10541
10542
10543/* Opcode 0xf3 0x0f 0xe3 - invalid */
10544/* Opcode 0xf2 0x0f 0xe3 - invalid */
10545
10546/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
10547FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
10548{
10549 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10550 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
10551}
10552
10553
10554/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
10555FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
10556{
10557 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10558 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
10559}
10560
10561
10562/* Opcode 0xf3 0x0f 0xe4 - invalid */
10563/* Opcode 0xf2 0x0f 0xe4 - invalid */
10564
10565/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
10566FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
10567{
10568 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10569 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
10570}
10571
10572
10573/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
10574FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
10575{
10576 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10577 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
10578}
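

/*
 * Reference sketch, not part of the decoder: the high-half multiplies the
 * pmulhuw (unsigned) and pmulhw (signed) workers perform per 16-bit lane
 * (hypothetical names below): the full 32-bit product is formed and only its
 * upper half is kept.
 */
#if 0 /* illustrative only */
static uint16_t iemRefPMulHuw(uint16_t uWord1, uint16_t uWord2)
{
    return (uint16_t)(((uint32_t)uWord1 * uWord2) >> 16);
}

static uint16_t iemRefPMulHw(int16_t iWord1, int16_t iWord2)
{
    return (uint16_t)(((int32_t)iWord1 * iWord2) >> 16);
}
#endif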
10579
10580
10581/* Opcode 0xf3 0x0f 0xe5 - invalid */
10582/* Opcode 0xf2 0x0f 0xe5 - invalid */
10583
10584/* Opcode 0x0f 0xe6 - invalid */
10585/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
10586FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
10587/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
10588FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
10589/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
10590FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
10591
10592
10593/**
10594 * @opcode 0xe7
10595 * @opcodesub !11 mr/reg
10596 * @oppfx none
10597 * @opcpuid sse
10598 * @opgroup og_sse1_cachect
10599 * @opxcpttype none
10600 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
10601 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10602 */
10603FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
10604{
10605 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10607 if (IEM_IS_MODRM_MEM_MODE(bRm))
10608 {
10609 /* Register, memory. */
10610 IEM_MC_BEGIN(0, 2);
10611 IEM_MC_LOCAL(uint64_t, uSrc);
10612 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10613
10614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10616 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
10617 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10618
10619 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
10620 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10621 IEM_MC_FPU_TO_MMX_MODE();
10622
10623 IEM_MC_ADVANCE_RIP();
10624 IEM_MC_END();
10625 return VINF_SUCCESS;
10626 }
10627 /**
10628 * @opdone
10629 * @opmnemonic ud0fe7reg
10630 * @opcode 0xe7
10631 * @opcodesub 11 mr/reg
10632 * @oppfx none
10633 * @opunused immediate
10634 * @opcpuid sse
10635 * @optest ->
10636 */
10637 return IEMOP_RAISE_INVALID_OPCODE();
10638}
10639
10640/**
10641 * @opcode 0xe7
10642 * @opcodesub !11 mr/reg
10643 * @oppfx 0x66
10644 * @opcpuid sse2
10645 * @opgroup og_sse2_cachect
10646 * @opxcpttype 1
10647 * @optest op1=-1 op2=2 -> op1=2
10648 * @optest op1=0 op2=-42 -> op1=-42
10649 */
10650FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
10651{
10652 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10654 if (IEM_IS_MODRM_MEM_MODE(bRm))
10655 {
10656 /* Register, memory. */
10657 IEM_MC_BEGIN(0, 2);
10658 IEM_MC_LOCAL(RTUINT128U, uSrc);
10659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10660
10661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10663 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10664 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10665
10666 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10667 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10668
10669 IEM_MC_ADVANCE_RIP();
10670 IEM_MC_END();
10671 return VINF_SUCCESS;
10672 }
10673
10674 /**
10675 * @opdone
10676 * @opmnemonic ud660fe7reg
10677 * @opcode 0xe7
10678 * @opcodesub 11 mr/reg
10679 * @oppfx 0x66
10680 * @opunused immediate
10681 * @opcpuid sse
10682 * @optest ->
10683 */
10684 return IEMOP_RAISE_INVALID_OPCODE();
10685}
10686
10687/* Opcode 0xf3 0x0f 0xe7 - invalid */
10688/* Opcode 0xf2 0x0f 0xe7 - invalid */
10689
10690
10691/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
10692FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
10693{
10694 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10695 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
10696}
10697
10698
10699/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
10700FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
10701{
10702 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10703 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
10704}
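

/*
 * Reference sketch, not part of the decoder: the signed saturation the psubsb
 * workers apply per byte lane (hypothetical name below); results outside the
 * int8_t range are clamped to INT8_MIN/INT8_MAX instead of wrapping.
 */
#if 0 /* illustrative only */
static int8_t iemRefPSubSb(int8_t iByte1, int8_t iByte2)
{
    int16_t const iRes = (int16_t)iByte1 - iByte2;
    return iRes < INT8_MIN ? INT8_MIN : iRes > INT8_MAX ? INT8_MAX : (int8_t)iRes;
}
#endif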
10705
10706
10707/* Opcode 0xf3 0x0f 0xe8 - invalid */
10708/* Opcode 0xf2 0x0f 0xe8 - invalid */
10709
10710/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
10711FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
10712{
10713 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10714 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
10715}
10716
10717
10718/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
10719FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
10720{
10721 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10722 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
10723}
10724
10725
10726/* Opcode 0xf3 0x0f 0xe9 - invalid */
10727/* Opcode 0xf2 0x0f 0xe9 - invalid */
10728
10729
10730/** Opcode 0x0f 0xea - pminsw Pq, Qq */
10731FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
10732{
10733 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10734 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
10735}
10736
10737
10738/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
10739FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
10740{
10741 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10742 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
10743}
10744
10745
10746/* Opcode 0xf3 0x0f 0xea - invalid */
10747/* Opcode 0xf2 0x0f 0xea - invalid */


/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
}


/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */


/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}


/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}


/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/* Opcode 0xf3 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
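
/*
 * A rough sketch of what the lddqu stub could eventually look like, following
 * the MC conventions used elsewhere in this file: lddqu is simply an unaligned
 * 128-bit load with a memory-only source (the register form is invalid).  The
 * SSE3 feature gating is glossed over here and the mnemonic details are
 * guesses.
 */
#if 0 /* sketch only */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq, Mx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(RTUINT128U, u128Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
    IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
#endif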


/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/* Opcode 0xf3 0x0f 0xf1 - invalid */
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/* Opcode 0xf3 0x0f 0xf2 - invalid */
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}

/* Opcode 0xf3 0x0f 0xf3 - invalid */
/* Opcode 0xf2 0x0f 0xf3 - invalid */
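
/*
 * For reference, a standalone sketch (not IEM code; helper name made up) of
 * the per-lane rule the shift workers above follow: the entire low 64 bits of
 * the source operand are the shift count, and any count larger than the lane
 * width clears the lane instead of being masked.
 */
#if 0 /* illustration only */
static uint16_t iemSketchPsllwLane(uint16_t uLane, uint64_t cShift)
{
    return cShift <= 15 ? (uint16_t)(uLane << cShift) : 0;
}
#endif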

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
}


/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
}


/* Opcode 0xf3 0x0f 0xf4 - invalid */
/* Opcode 0xf2 0x0f 0xf4 - invalid */
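
/*
 * A standalone sketch (not IEM code) of what the 64-bit pmuludq worker above
 * computes: an unsigned 32x32->64 multiply of the low dwords; the 128-bit
 * form does the same for each of the two even dword lanes.
 */
#if 0 /* illustration only */
static uint64_t iemSketchPmuludqU64(uint64_t uDst, uint64_t uSrc)
{
    return (uint64_t)(uint32_t)uDst * (uint32_t)uSrc;
}
#endif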

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}

/* Opcode 0xf3 0x0f 0xf5 - invalid */
/* Opcode 0xf2 0x0f 0xf5 - invalid */
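
/*
 * A standalone sketch (not IEM code) of one dword of the pmaddwd result:
 * corresponding signed word lanes are multiplied and each pair of 32-bit
 * products is summed.
 */
#if 0 /* illustration only */
static int32_t iemSketchPmaddwdPair(int16_t iDst0, int16_t iSrc0, int16_t iDst1, int16_t iSrc1)
{
    return (int32_t)iDst0 * iSrc0 + (int32_t)iDst1 * iSrc1;
}
#endif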

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}


/* Opcode 0xf3 0x0f 0xf6 - invalid */
/* Opcode 0xf2 0x0f 0xf6 - invalid */
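
/*
 * A standalone sketch (not IEM code) of the 64-bit psadbw computation: the
 * absolute differences of the eight unsigned byte lanes are summed into a
 * single value (at most 8 * 255 = 2040, so it fits the 16-bit result field).
 */
#if 0 /* illustration only */
static uint64_t iemSketchPsadbwU64(uint64_t uDst, uint64_t uSrc)
{
    uint64_t uSum = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
    {
        uint8_t const b1 = (uint8_t)(uDst >> (iByte * 8));
        uint8_t const b2 = (uint8_t)(uSrc >> (iByte * 8));
        uSum += b1 >= b2 ? (unsigned)(b1 - b2) : (unsigned)(b2 - b1);
    }
    return uSum;
}
#endif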

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf3 0x0f 0xf7 - invalid */
/* Opcode 0xf2 0x0f 0xf7 - invalid */
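
/*
 * Both maskmov stubs will need the same byte-selection rule; a standalone
 * sketch (not IEM code): bit 7 of each mask byte decides whether the
 * corresponding data byte is stored to [DS:rDI] (segment overridable), with
 * maskmovdqu additionally documented as a non-temporal store.
 */
#if 0 /* illustration only */
static void iemSketchMaskMov(uint8_t *pbDst, uint8_t const *pbSrc, uint8_t const *pbMsk, size_t cbVec)
{
    for (size_t off = 0; off < cbVec; off++)
        if (pbMsk[off] & 0x80)
            pbDst[off] = pbSrc[off];
}
#endif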


/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}


/* Opcode 0xf3 0x0f 0xf8 - invalid */
/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}


/* Opcode 0xf3 0x0f 0xf9 - invalid */
/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}


/* Opcode 0xf3 0x0f 0xfa - invalid */
/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}
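
/*
 * Note that the Pq,Qq form of psubq goes via the _Ex worker with the guest's
 * fSse2 feature flag: the 64-bit MMX encoding of psubq was only added with
 * SSE2, so presumably the worker raises #UD when the guest lacks it.
 */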


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}


/* Opcode 0xf3 0x0f 0xfb - invalid */
/* Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}


/* Opcode 0xf3 0x0f 0xfc - invalid */
/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}


/* Opcode 0xf3 0x0f 0xfd - invalid */
/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}


/* Opcode 0xf3 0x0f 0xfe - invalid */
/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
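
/*
 * Decoding note: for the byte sequence 0f ff 05 xx xx xx xx (ud0 with a
 * disp32 memory operand), the code above consumes the ModR/M byte and its
 * addressing bytes on Intel CPUs before raising #UD, while on other vendors
 * the exception is raised right after 0f ff.
 */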



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*         no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
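
/*
 * How the map above is indexed, for reference (cf. the 0x0f escape handler in
 * the one-byte opcode map): four entries per opcode byte, selected by the last
 * repeat/operand-size prefix recorded in pVCpu->iem.s.idxPrefix (0 = none,
 * 1 = 066h, 2 = 0f3h, 3 = 0f2h).  A sketch:
 */
#if 0 /* illustration only */
return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#endif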

/** @} */
