VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 96335

Last change on this file since 96335 was 96335, checked in by vboxsync, 3 years ago

VMM/IEM: Implement maxps/maxpd instructions, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 383.3 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96335 2022-08-19 12:07:40Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27
28/**
29 * Common worker for MMX instructions on the form:
30 * pxxx mm1, mm2/mem64
 *
 * The first operand (the destination) is an MMX register selected by the
 * ModR/M reg field; the second is an MMX register or a 64-bit memory operand.
 * The @a pfnU64 worker is invoked via IEM_MC_CALL_MMX_AIMPL_2 and thus also
 * receives the FXSAVE state (see iemOpCommonMmxOpt_FullFull_To_Full for the
 * variant whose worker takes only the operands).
31 */
32FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
33{
34 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
35 if (IEM_IS_MODRM_REG_MODE(bRm))
36 {
37 /*
38 * Register, register.
39 */
40 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
41 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
42 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
43 IEM_MC_BEGIN(2, 0);
44 IEM_MC_ARG(uint64_t *, pDst, 0);
45 IEM_MC_ARG(uint64_t const *, pSrc, 1);
46 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
47 IEM_MC_PREPARE_FPU_USAGE();
48 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
49 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
50 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc); /* worker also receives the FXSAVE state */
51 IEM_MC_MODIFIED_MREG_BY_REF(pDst); /* mark the destination MMX register as modified */
52 IEM_MC_FPU_TO_MMX_MODE(); /* switch the x87 unit into MMX mode */
53 IEM_MC_ADVANCE_RIP();
54 IEM_MC_END();
55 }
56 else
57 {
58 /*
59 * Register, memory.
60 */
61 IEM_MC_BEGIN(2, 2);
62 IEM_MC_ARG(uint64_t *, pDst, 0);
63 IEM_MC_LOCAL(uint64_t, uSrc);
64 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
65 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
66
67 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before the 'done decoding' point */
68 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
69 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
70 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* fetch into a local before touching register state */
71
72 IEM_MC_PREPARE_FPU_USAGE();
73 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
74 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
75 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
76 IEM_MC_FPU_TO_MMX_MODE();
77
78 IEM_MC_ADVANCE_RIP();
79 IEM_MC_END();
80 }
81 return VINF_SUCCESS;
82}
83
84
85/**
86 * Common worker for MMX instructions on the form:
87 * pxxx mm1, mm2/mem64
88 *
89 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
90 * no FXSAVE state, just the operands.
 *
 * Same decode/exception/retire sequence as iemOpCommonMmx_FullFull_To_Full;
 * only the worker call convention differs (IEM_MC_CALL_VOID_AIMPL_2).
91 */
92FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
93{
94 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
95 if (IEM_IS_MODRM_REG_MODE(bRm))
96 {
97 /*
98 * Register, register.
99 */
100 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
101 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
103 IEM_MC_BEGIN(2, 0);
104 IEM_MC_ARG(uint64_t *, pDst, 0);
105 IEM_MC_ARG(uint64_t const *, pSrc, 1);
106 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
107 IEM_MC_PREPARE_FPU_USAGE();
108 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
109 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
110 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc); /* operands only, no FXSAVE state */
111 IEM_MC_MODIFIED_MREG_BY_REF(pDst); /* mark the destination MMX register as modified */
112 IEM_MC_FPU_TO_MMX_MODE(); /* switch the x87 unit into MMX mode */
113 IEM_MC_ADVANCE_RIP();
114 IEM_MC_END();
115 }
116 else
117 {
118 /*
119 * Register, memory.
120 */
121 IEM_MC_BEGIN(2, 2);
122 IEM_MC_ARG(uint64_t *, pDst, 0);
123 IEM_MC_LOCAL(uint64_t, uSrc);
124 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
125 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
126
127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before 'done decoding' */
128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
129 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
130 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
131
132 IEM_MC_PREPARE_FPU_USAGE();
133 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
134 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
135 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
136 IEM_MC_FPU_TO_MMX_MODE();
137
138 IEM_MC_ADVANCE_RIP();
139 IEM_MC_END();
140 }
141 return VINF_SUCCESS;
142}
143
144
145/**
146 * Common worker for MMX instructions on the form:
147 * pxxx mm1, mm2/mem64
148 * for instructions introduced with SSE.
 *
 * Identical to iemOpCommonMmx_FullFull_To_Full except that the exception
 * check also requires SSE or the AMD MMX extensions to be present
 * (IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT).
149 */
150FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
151{
152 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
153 if (IEM_IS_MODRM_REG_MODE(bRm))
154 {
155 /*
156 * Register, register.
157 */
158 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
159 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
161 IEM_MC_BEGIN(2, 0);
162 IEM_MC_ARG(uint64_t *, pDst, 0);
163 IEM_MC_ARG(uint64_t const *, pSrc, 1);
164 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT(); /* also checks for SSE / AMD MMX extensions */
165 IEM_MC_PREPARE_FPU_USAGE();
166 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
167 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
168 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc); /* worker also receives the FXSAVE state */
169 IEM_MC_MODIFIED_MREG_BY_REF(pDst); /* mark the destination MMX register as modified */
170 IEM_MC_FPU_TO_MMX_MODE(); /* switch the x87 unit into MMX mode */
171 IEM_MC_ADVANCE_RIP();
172 IEM_MC_END();
173 }
174 else
175 {
176 /*
177 * Register, memory.
178 */
179 IEM_MC_BEGIN(2, 2);
180 IEM_MC_ARG(uint64_t *, pDst, 0);
181 IEM_MC_LOCAL(uint64_t, uSrc);
182 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
184
185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before 'done decoding' */
186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
187 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
188 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
189
190 IEM_MC_PREPARE_FPU_USAGE();
191 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
192 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
193 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
194 IEM_MC_FPU_TO_MMX_MODE();
195
196 IEM_MC_ADVANCE_RIP();
197 IEM_MC_END();
198 }
199 return VINF_SUCCESS;
200}
201
202
203/**
204 * Common worker for MMX instructions on the form:
205 * pxxx mm1, mm2/mem64
206 * for instructions introduced with SSE.
207 *
208 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
209 * no FXSAVE state, just the operands.
 *
 * The exception check also requires SSE or the AMD MMX extensions
 * (IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT).
210 */
211FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
212{
213 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
214 if (IEM_IS_MODRM_REG_MODE(bRm))
215 {
216 /*
217 * Register, register.
218 */
219 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
220 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
222 IEM_MC_BEGIN(2, 0);
223 IEM_MC_ARG(uint64_t *, pDst, 0);
224 IEM_MC_ARG(uint64_t const *, pSrc, 1);
225 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
226 IEM_MC_PREPARE_FPU_USAGE();
227 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
228 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
229 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc); /* operands only, no FXSAVE state */
230 IEM_MC_MODIFIED_MREG_BY_REF(pDst); /* mark the destination MMX register as modified */
231 IEM_MC_FPU_TO_MMX_MODE(); /* switch the x87 unit into MMX mode */
232 IEM_MC_ADVANCE_RIP();
233 IEM_MC_END();
234 }
235 else
236 {
237 /*
238 * Register, memory.
239 */
240 IEM_MC_BEGIN(2, 2);
241 IEM_MC_ARG(uint64_t *, pDst, 0);
242 IEM_MC_LOCAL(uint64_t, uSrc);
243 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
245
246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before 'done decoding' */
247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
248 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
249 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
250
251 IEM_MC_PREPARE_FPU_USAGE();
252 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
253 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
254 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
255 IEM_MC_FPU_TO_MMX_MODE();
256
257 IEM_MC_ADVANCE_RIP();
258 IEM_MC_END();
259 }
260 return VINF_SUCCESS;
261}
262
263
264/**
265 * Common worker for MMX instructions on the form:
266 * pxxx mm1, mm2/mem64
267 * that was introduced with SSE2.
 *
 * Identical to iemOpCommonMmx_FullFull_To_Full except that the exception
 * check (IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX) additionally considers the
 * caller-supplied @a fSupported flag, so callers can gate the instruction on
 * a CPUID feature bit.
268 */
269FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
270{
271 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
272 if (IEM_IS_MODRM_REG_MODE(bRm))
273 {
274 /*
275 * Register, register.
276 */
277 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
278 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
280 IEM_MC_BEGIN(2, 0);
281 IEM_MC_ARG(uint64_t *, pDst, 0);
282 IEM_MC_ARG(uint64_t const *, pSrc, 1);
283 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported); /* exception path also taken when !fSupported */
284 IEM_MC_PREPARE_FPU_USAGE();
285 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
286 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
287 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc); /* worker also receives the FXSAVE state */
288 IEM_MC_MODIFIED_MREG_BY_REF(pDst); /* mark the destination MMX register as modified */
289 IEM_MC_FPU_TO_MMX_MODE(); /* switch the x87 unit into MMX mode */
290 IEM_MC_ADVANCE_RIP();
291 IEM_MC_END();
292 }
293 else
294 {
295 /*
296 * Register, memory.
297 */
298 IEM_MC_BEGIN(2, 2);
299 IEM_MC_ARG(uint64_t *, pDst, 0);
300 IEM_MC_LOCAL(uint64_t, uSrc);
301 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
303
304 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before 'done decoding' */
305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
306 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
307 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
308
309 IEM_MC_PREPARE_FPU_USAGE();
310 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
311 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
312 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
313 IEM_MC_FPU_TO_MMX_MODE();
314
315 IEM_MC_ADVANCE_RIP();
316 IEM_MC_END();
317 }
318 return VINF_SUCCESS;
319}
320
321
322/**
323 * Common worker for SSE2 instructions on the forms:
324 * pxxx xmm1, xmm2/mem128
325 *
326 * Proper alignment of the 128-bit operand is enforced.
327 * Exceptions type 4. SSE2 cpuid checks.
328 *
 * The @a pfnU128 worker is invoked via IEM_MC_CALL_SSE_AIMPL_2 and thus also
 * receives the FXSAVE state (see iemOpCommonSse2Opt_FullFull_To_Full for the
 * variant whose worker takes only the operands).
 *
329 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2Opt_FullFull_To_Full
330 */
331FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
332{
333 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
334 if (IEM_IS_MODRM_REG_MODE(bRm))
335 {
336 /*
337 * Register, register.
338 */
339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
340 IEM_MC_BEGIN(2, 0);
341 IEM_MC_ARG(PRTUINT128U, pDst, 0);
342 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
343 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
344 IEM_MC_PREPARE_SSE_USAGE();
345 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
346 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
347 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc); /* worker also receives the FXSAVE state */
348 IEM_MC_ADVANCE_RIP();
349 IEM_MC_END();
350 }
351 else
352 {
353 /*
354 * Register, memory.
355 */
356 IEM_MC_BEGIN(2, 2);
357 IEM_MC_ARG(PRTUINT128U, pDst, 0);
358 IEM_MC_LOCAL(RTUINT128U, uSrc);
359 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
361
362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before 'done decoding' */
363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
364 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
365 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked 128-bit read */
366
367 IEM_MC_PREPARE_SSE_USAGE();
368 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
369 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
370
371 IEM_MC_ADVANCE_RIP();
372 IEM_MC_END();
373 }
374 return VINF_SUCCESS;
375}
376
377
378/**
379 * Common worker for SSE2 instructions on the forms:
380 * pxxx xmm1, xmm2/mem128
381 *
382 * Proper alignment of the 128-bit operand is enforced.
383 * Exceptions type 4. SSE2 cpuid checks.
384 *
385 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
386 * no FXSAVE state, just the operands.
387 *
388 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
389 */
390FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
391{
392 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
393 if (IEM_IS_MODRM_REG_MODE(bRm))
394 {
395 /*
396 * Register, register.
397 */
398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
399 IEM_MC_BEGIN(2, 0);
400 IEM_MC_ARG(PRTUINT128U, pDst, 0);
401 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
402 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
403 IEM_MC_PREPARE_SSE_USAGE();
404 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
405 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
406 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc); /* operands only, no FXSAVE state */
407 IEM_MC_ADVANCE_RIP();
408 IEM_MC_END();
409 }
410 else
411 {
412 /*
413 * Register, memory.
414 */
415 IEM_MC_BEGIN(2, 2);
416 IEM_MC_ARG(PRTUINT128U, pDst, 0);
417 IEM_MC_LOCAL(RTUINT128U, uSrc);
418 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
420
421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before 'done decoding' */
422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
423 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
424 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked 128-bit read */
425
426 IEM_MC_PREPARE_SSE_USAGE();
427 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
428 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
429
430 IEM_MC_ADVANCE_RIP();
431 IEM_MC_END();
432 }
433 return VINF_SUCCESS;
434}
435
436
437/**
438 * Common worker for MMX instructions on the forms:
439 * pxxxx mm1, mm2/mem32
440 *
441 * The 2nd operand is the first half of a register, which in the memory case
442 * means a 32-bit memory access.
 *
 * The memory operand is fetched with IEM_MC_FETCH_MEM_U32_ZX_U64, i.e. a
 * 32-bit read zero-extended to 64 bits.  The @a pfnU64 worker takes only the
 * operands (no FXSAVE state), matching the other *Opt* workers above.
443 */
444FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64) /* PFN typedef for consistency with the sibling workers (the function-type form relied on implicit parameter adjustment). */
445{
446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
447 if (IEM_IS_MODRM_REG_MODE(bRm))
448 {
449 /*
450 * Register, register.
451 */
452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
453 IEM_MC_BEGIN(2, 0);
454 IEM_MC_ARG(uint64_t *, puDst, 0);
455 IEM_MC_ARG(uint64_t const *, puSrc, 1);
456 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
457 IEM_MC_PREPARE_FPU_USAGE();
458 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
459 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
460 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc); /* operands only, no FXSAVE state */
461 IEM_MC_MODIFIED_MREG_BY_REF(puDst); /* mark the destination MMX register as modified */
462 IEM_MC_FPU_TO_MMX_MODE(); /* switch the x87 unit into MMX mode */
463 IEM_MC_ADVANCE_RIP();
464 IEM_MC_END();
465 }
466 else
467 {
468 /*
469 * Register, memory.
470 */
471 IEM_MC_BEGIN(2, 2);
472 IEM_MC_ARG(uint64_t *, puDst, 0);
473 IEM_MC_LOCAL(uint64_t, uSrc);
474 IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
476
477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before 'done decoding' */
478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
479 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
480 IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* 32-bit read, zero-extended to 64 bits */
481
482 IEM_MC_PREPARE_FPU_USAGE();
483 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
484 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
485 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
486 IEM_MC_FPU_TO_MMX_MODE();
487
488 IEM_MC_ADVANCE_RIP();
489 IEM_MC_END();
490 }
491 return VINF_SUCCESS;
492}
493
494
495/**
496 * Common worker for SSE instructions on the forms:
497 * pxxxx xmm1, xmm2/mem128
498 *
499 * The 2nd operand is the first half of a register, which in the memory case
500 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
501 *
502 * Exceptions type 4.
 *
 * NOTE(review): this worker uses IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE where
 * the HighHigh siblings use IEM_MC_PREPARE_SSE_USAGE — confirm the
 * difference is intentional.
503 */
504FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
505{
506 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
507 if (IEM_IS_MODRM_REG_MODE(bRm))
508 {
509 /*
510 * Register, register.
511 */
512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
513 IEM_MC_BEGIN(2, 0);
514 IEM_MC_ARG(PRTUINT128U, puDst, 0);
515 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
516 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
517 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
518 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
519 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
520 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc); /* operands only, no FXSAVE state */
521 IEM_MC_ADVANCE_RIP();
522 IEM_MC_END();
523 }
524 else
525 {
526 /*
527 * Register, memory.
528 */
529 IEM_MC_BEGIN(2, 2);
530 IEM_MC_ARG(PRTUINT128U, puDst, 0);
531 IEM_MC_LOCAL(RTUINT128U, uSrc);
532 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
534
535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before 'done decoding' */
536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
537 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
538 /** @todo Most CPUs probably only read the low qword. We read everything to
539 * make sure we apply segmentation and alignment checks correctly.
540 * When we have time, it would be interesting to explore what real
541 * CPUs actually does and whether it will do a TLB load for the high
542 * part or skip any associated \#PF. Ditto for segmentation \#GPs. */
543 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked 128-bit read */
544
545 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
546 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
547 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
548
549 IEM_MC_ADVANCE_RIP();
550 IEM_MC_END();
551 }
552 return VINF_SUCCESS;
553}
554
555
556/**
557 * Common worker for SSE2 instructions on the forms:
558 * pxxxx xmm1, xmm2/mem128
559 *
560 * The 2nd operand is the first half of a register, which in the memory case
561 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
562 *
563 * Exceptions type 4.
 *
 * Same structure as iemOpCommonSse_LowLow_To_Full but with the SSE2 cpuid
 * check.  NOTE(review): uses IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE where the
 * HighHigh siblings use IEM_MC_PREPARE_SSE_USAGE — confirm intentional.
564 */
565FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
566{
567 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
568 if (IEM_IS_MODRM_REG_MODE(bRm))
569 {
570 /*
571 * Register, register.
572 */
573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
574 IEM_MC_BEGIN(2, 0);
575 IEM_MC_ARG(PRTUINT128U, puDst, 0);
576 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
577 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
578 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
579 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
580 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
581 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc); /* operands only, no FXSAVE state */
582 IEM_MC_ADVANCE_RIP();
583 IEM_MC_END();
584 }
585 else
586 {
587 /*
588 * Register, memory.
589 */
590 IEM_MC_BEGIN(2, 2);
591 IEM_MC_ARG(PRTUINT128U, puDst, 0);
592 IEM_MC_LOCAL(RTUINT128U, uSrc);
593 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
595
596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before 'done decoding' */
597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
598 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
599 /** @todo Most CPUs probably only read the low qword. We read everything to
600 * make sure we apply segmentation and alignment checks correctly.
601 * When we have time, it would be interesting to explore what real
602 * CPUs actually does and whether it will do a TLB load for the high
603 * part or skip any associated \#PF. Ditto for segmentation \#GPs. */
604 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked 128-bit read */
605
606 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
607 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
608 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
609
610 IEM_MC_ADVANCE_RIP();
611 IEM_MC_END();
612 }
613 return VINF_SUCCESS;
614}
615
616
617/**
618 * Common worker for MMX instructions on the form:
619 * pxxxx mm1, mm2/mem64
620 *
621 * The 2nd operand is the second half of a register, which in the memory case
622 * means a 64-bit memory access for MMX.
 *
 * The @a pfnU64 worker takes only the operands (no FXSAVE state).
623 */
624FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
625{
626 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
627 if (IEM_IS_MODRM_REG_MODE(bRm))
628 {
629 /*
630 * Register, register.
631 */
632 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
633 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
635 IEM_MC_BEGIN(2, 0);
636 IEM_MC_ARG(uint64_t *, puDst, 0);
637 IEM_MC_ARG(uint64_t const *, puSrc, 1);
638 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
639 IEM_MC_PREPARE_FPU_USAGE();
640 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
641 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
642 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc); /* operands only, no FXSAVE state */
643 IEM_MC_MODIFIED_MREG_BY_REF(puDst); /* mark the destination MMX register as modified */
644 IEM_MC_FPU_TO_MMX_MODE(); /* switch the x87 unit into MMX mode */
645 IEM_MC_ADVANCE_RIP();
646 IEM_MC_END();
647 }
648 else
649 {
650 /*
651 * Register, memory.
652 */
653 IEM_MC_BEGIN(2, 2);
654 IEM_MC_ARG(uint64_t *, puDst, 0);
655 IEM_MC_LOCAL(uint64_t, uSrc);
656 IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
657 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
658
659 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before 'done decoding' */
660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
661 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
662 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */
663
664 IEM_MC_PREPARE_FPU_USAGE();
665 IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
666 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
667 IEM_MC_MODIFIED_MREG_BY_REF(puDst);
668 IEM_MC_FPU_TO_MMX_MODE();
669
670 IEM_MC_ADVANCE_RIP();
671 IEM_MC_END();
672 }
673 return VINF_SUCCESS;
674}
675
676
677/**
678 * Common worker for SSE instructions on the form:
679 * pxxxx xmm1, xmm2/mem128
680 *
681 * The 2nd operand is the second half of a register, which for SSE a 128-bit
682 * aligned access where it may read the full 128 bits or only the upper 64 bits.
683 *
684 * Exceptions type 4.
 *
 * The @a pfnU128 worker takes only the operands (no FXSAVE state).
685 */
686FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
687{
688 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
689 if (IEM_IS_MODRM_REG_MODE(bRm))
690 {
691 /*
692 * Register, register.
693 */
694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
695 IEM_MC_BEGIN(2, 0);
696 IEM_MC_ARG(PRTUINT128U, puDst, 0);
697 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
698 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
699 IEM_MC_PREPARE_SSE_USAGE();
700 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
701 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
702 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc); /* operands only, no FXSAVE state */
703 IEM_MC_ADVANCE_RIP();
704 IEM_MC_END();
705 }
706 else
707 {
708 /*
709 * Register, memory.
710 */
711 IEM_MC_BEGIN(2, 2);
712 IEM_MC_ARG(PRTUINT128U, puDst, 0);
713 IEM_MC_LOCAL(RTUINT128U, uSrc);
714 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
715 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
716
717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before 'done decoding' */
718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
719 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
720 /** @todo Most CPUs probably only read the high qword. We read everything to
721 * make sure we apply segmentation and alignment checks correctly.
722 * When we have time, it would be interesting to explore what real
723 * CPUs actually does and whether it will do a TLB load for the lower
724 * part or skip any associated \#PF. */
725 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked 128-bit read */
726
727 IEM_MC_PREPARE_SSE_USAGE();
728 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
729 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
730
731 IEM_MC_ADVANCE_RIP();
732 IEM_MC_END();
733 }
734 return VINF_SUCCESS;
735}
736
737
738/**
739 * Common worker for SSE instructions on the forms:
740 * pxxs xmm1, xmm2/mem128
741 *
742 * Proper alignment of the 128-bit operand is enforced.
743 * Exceptions type 2. SSE cpuid checks.
744 *
 * Floating-point variant: the worker produces an IEMSSERESULT which is
 * stored back to the destination XMM register afterwards, followed by a
 * check for pending SIMD floating-point (or \#UD) exceptions.
 *
745 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
746 */
747FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
748{
749 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
750 if (IEM_IS_MODRM_REG_MODE(bRm))
751 {
752 /*
753 * Register, register.
754 */
755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
756 IEM_MC_BEGIN(3, 1);
757 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
758 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
759 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
760 IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
761 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
762 IEM_MC_PREPARE_SSE_USAGE();
763 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); /* both sources const; result goes via SseRes */
764 IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
765 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
766 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm)); /* write the result to the destination register */
767 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); /* raise any pending SIMD-FP exception (or #UD) */
768
769 IEM_MC_ADVANCE_RIP();
770 IEM_MC_END();
771 }
772 else
773 {
774 /*
775 * Register, memory.
776 */
777 IEM_MC_BEGIN(3, 2);
778 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
779 IEM_MC_LOCAL(X86XMMREG, uSrc2);
780 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
781 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
782 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
784
785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before 'done decoding' */
786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
787 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
788 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked 128-bit read */
789
790 IEM_MC_PREPARE_SSE_USAGE();
791 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
792 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
793 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
794 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
795
796 IEM_MC_ADVANCE_RIP();
797 IEM_MC_END();
798 }
799 return VINF_SUCCESS;
800}
801
802
803/**
804 * Common worker for SSE2 instructions on the forms:
805 * pxxd xmm1, xmm2/mem128
806 *
807 * Proper alignment of the 128-bit operand is enforced.
808 * Exceptions type 2. SSE cpuid checks.
809 *
 * Same structure as iemOpCommonSseFp_FullFull_To_Full, but with the SSE2
 * cpuid/exception check instead of the SSE one.
 *
810 * @sa iemOpCommonSseFp_FullFull_To_Full
811 */
812FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
813{
814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
815 if (IEM_IS_MODRM_REG_MODE(bRm))
816 {
817 /*
818 * Register, register.
819 */
820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
821 IEM_MC_BEGIN(3, 1);
822 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
823 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
824 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
825 IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
826 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
827 IEM_MC_PREPARE_SSE_USAGE();
828 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); /* both sources const; result goes via SseRes */
829 IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
830 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
831 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm)); /* write the result to the destination register */
832 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); /* raise any pending SIMD-FP exception (or #UD) */
833
834 IEM_MC_ADVANCE_RIP();
835 IEM_MC_END();
836 }
837 else
838 {
839 /*
840 * Register, memory.
841 */
842 IEM_MC_BEGIN(3, 2);
843 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
844 IEM_MC_LOCAL(X86XMMREG, uSrc2);
845 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
846 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
847 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
849
850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before 'done decoding' */
851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
852 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
853 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked 128-bit read */
854
855 IEM_MC_PREPARE_SSE_USAGE();
856 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
857 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
858 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
859 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
860
861 IEM_MC_ADVANCE_RIP();
862 IEM_MC_END();
863 }
864 return VINF_SUCCESS;
865}
866
867
868/**
869 * Common worker for SSE2 instructions on the form:
870 * pxxxx xmm1, xmm2/mem128
871 *
872 * The 2nd operand is the second half of a register, which for SSE a 128-bit
873 * aligned access where it may read the full 128 bits or only the upper 64 bits.
874 *
875 * Exceptions type 4.
 *
 * Same structure as iemOpCommonSse_HighHigh_To_Full but with the SSE2 cpuid
 * check.  The @a pfnU128 worker takes only the operands (no FXSAVE state).
876 */
877FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
878{
879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* ModR/M byte selects the operands. */
880 if (IEM_IS_MODRM_REG_MODE(bRm))
881 {
882 /*
883 * Register, register.
884 */
885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed */
886 IEM_MC_BEGIN(2, 0);
887 IEM_MC_ARG(PRTUINT128U, puDst, 0);
888 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
889 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
890 IEM_MC_PREPARE_SSE_USAGE();
891 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
892 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
893 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc); /* operands only, no FXSAVE state */
894 IEM_MC_ADVANCE_RIP();
895 IEM_MC_END();
896 }
897 else
898 {
899 /*
900 * Register, memory.
901 */
902 IEM_MC_BEGIN(2, 2);
903 IEM_MC_ARG(PRTUINT128U, puDst, 0);
904 IEM_MC_LOCAL(RTUINT128U, uSrc);
905 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
907
908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before 'done decoding' */
909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
910 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
911 /** @todo Most CPUs probably only read the high qword. We read everything to
912 * make sure we apply segmentation and alignment checks correctly.
913 * When we have time, it would be interesting to explore what real
914 * CPUs actually does and whether it will do a TLB load for the lower
915 * part or skip any associated \#PF. */
916 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checked 128-bit read */
917
918 IEM_MC_PREPARE_SSE_USAGE();
919 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
920 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
921
922 IEM_MC_ADVANCE_RIP();
923 IEM_MC_END();
924 }
925 return VINF_SUCCESS;
926}
927
928
929/** Opcode 0x0f 0x00 /0.
 *
 * SLDT - store the LDT selector to a register or a 16-bit memory operand.
 * Minimum CPU: 286; not available in real or V86 mode.  The register form is
 * deferred to iemCImpl_sldt_reg (honouring the effective operand size), the
 * memory form to iemCImpl_sldt_mem. */
930FNIEMOPRM_DEF(iemOp_Grp6_sldt)
931{
932 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
933 IEMOP_HLP_MIN_286();
934 IEMOP_HLP_NO_REAL_OR_V86_MODE();
935
936 if (IEM_IS_MODRM_REG_MODE(bRm))
937 {
938 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
939 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize); /* whole instruction handled in C */
940 }
941
942 /* Ignore operand size here, memory refs are always 16-bit. */
943 IEM_MC_BEGIN(2, 0);
944 IEM_MC_ARG(uint16_t, iEffSeg, 0);
945 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* consumes SIB/displacement bytes, hence before the 'decoded' point */
947 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
948 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
949 IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
950 IEM_MC_END();
951 return VINF_SUCCESS;
952}
953
954
955/** Opcode 0x0f 0x00 /1.
 *
 * STR - store the task register selector to a register or a 16-bit memory
 * operand.  Minimum CPU: 286; not available in real or V86 mode.  Mirrors
 * iemOp_Grp6_sldt, deferring to iemCImpl_str_reg / iemCImpl_str_mem. */
956FNIEMOPRM_DEF(iemOp_Grp6_str)
957{
958 IEMOP_MNEMONIC(str, "str Rv/Mw");
959 IEMOP_HLP_MIN_286();
960 IEMOP_HLP_NO_REAL_OR_V86_MODE();
961
962
963 if (IEM_IS_MODRM_REG_MODE(bRm))
964 {
965 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
966 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize); /* whole instruction handled in C */
967 }
968
969 /* Ignore operand size here, memory refs are always 16-bit. */
970 IEM_MC_BEGIN(2, 0);
971 IEM_MC_ARG(uint16_t, iEffSeg, 0);
972 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* consumes SIB/displacement bytes, hence before the 'decoded' point */
974 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
975 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
976 IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
977 IEM_MC_END();
978 return VINF_SUCCESS;
979}
980
981
982/** Opcode 0x0f 0x00 /2.
 *
 * LLDT - load the LDT register from a 16-bit register or memory operand.
 * Minimum CPU: 286; not available in real or V86 mode.  Both forms fetch the
 * selector and hand it to iemCImpl_lldt; the memory form additionally checks
 * CPL 0 here before reading memory (the register form presumably leaves the
 * privilege check to iemCImpl_lldt — see the 'test order' todo below). */
983FNIEMOPRM_DEF(iemOp_Grp6_lldt)
984{
985 IEMOP_MNEMONIC(lldt, "lldt Ew");
986 IEMOP_HLP_MIN_286();
987 IEMOP_HLP_NO_REAL_OR_V86_MODE();
988
989 if (IEM_IS_MODRM_REG_MODE(bRm))
990 {
991 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
992 IEM_MC_BEGIN(1, 0);
993 IEM_MC_ARG(uint16_t, u16Sel, 0);
994 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
995 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
996 IEM_MC_END();
997 }
998 else
999 {
1000 IEM_MC_BEGIN(1, 1);
1001 IEM_MC_ARG(uint16_t, u16Sel, 0);
1002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* consumes SIB/displacement bytes, hence before the 'decoded' point */
1004 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
1005 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
1006 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1007 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
1008 IEM_MC_END();
1009 }
1010 return VINF_SUCCESS;
1011}
1012
1013
/** Opcode 0x0f 0x00 /3 - ltr: load the task register from a selector in a
 *  register or 16-bit memory word. Privilege checks live in iemCImpl_ltr. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Selector taken from a general purpose register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        /* Selector fetched from memory; CPL check happens before the fetch. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1044
1045
1046/** Opcode 0x0f 0x00 /3. */
1047FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
1048{
1049 IEMOP_HLP_MIN_286();
1050 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1051
1052 if (IEM_IS_MODRM_REG_MODE(bRm))
1053 {
1054 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1055 IEM_MC_BEGIN(2, 0);
1056 IEM_MC_ARG(uint16_t, u16Sel, 0);
1057 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1058 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1059 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
1060 IEM_MC_END();
1061 }
1062 else
1063 {
1064 IEM_MC_BEGIN(2, 1);
1065 IEM_MC_ARG(uint16_t, u16Sel, 0);
1066 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1069 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1070 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1071 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
1072 IEM_MC_END();
1073 }
1074 return VINF_SUCCESS;
1075}
1076
1077
/** Opcode 0x0f 0x00 /4 - verr Ew: verify a segment for reading.
 *  Shares its worker with verw (fWrite=false selects the read check). */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
1085
1086
/** Opcode 0x0f 0x00 /5 - verw Ew: verify a segment for writing.
 *  Shares its worker with verr (fWrite=true selects the write check). */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}
1094
1095
/**
 * Group 6 jump table, indexed by the ModR/M reg field (/0../7).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,        /* /0 */
    iemOp_Grp6_str,         /* /1 */
    iemOp_Grp6_lldt,        /* /2 */
    iemOp_Grp6_ltr,         /* /3 */
    iemOp_Grp6_verr,        /* /4 */
    iemOp_Grp6_verw,        /* /5 */
    iemOp_InvalidWithRM,    /* /6 */
    iemOp_InvalidWithRM     /* /7 */
};
1110
/** Opcode 0x0f 0x00 - group 6: reads the ModR/M byte and dispatches on the
 *  reg field via g_apfnGroup6. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
1117
1118
/** Opcode 0x0f 0x01 /0 (memory form) - sgdt: store the GDT register to
 *  memory; actual work done by iemCImpl_sgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1135
1136
/** Opcode 0x0f 0x01 /0, register form 0xc1 - vmcall: Intel hypercall, fully
 *  handled by iemCImpl_vmcall. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}
1149
1150
1151/** Opcode 0x0f 0x01 /0. */
1152#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1153FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
1154{
1155 IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
1156 IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
1157 IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
1158 IEMOP_HLP_DONE_DECODING();
1159 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
1160}
1161#else
1162FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
1163{
1164 IEMOP_BITCH_ABOUT_STUB();
1165 return IEMOP_RAISE_INVALID_OPCODE();
1166}
1167#endif
1168
1169
1170/** Opcode 0x0f 0x01 /0. */
1171#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1172FNIEMOP_DEF(iemOp_Grp7_vmresume)
1173{
1174 IEMOP_MNEMONIC(vmresume, "vmresume");
1175 IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
1176 IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
1177 IEMOP_HLP_DONE_DECODING();
1178 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
1179}
1180#else
1181FNIEMOP_DEF(iemOp_Grp7_vmresume)
1182{
1183 IEMOP_BITCH_ABOUT_STUB();
1184 return IEMOP_RAISE_INVALID_OPCODE();
1185}
1186#endif
1187
1188
1189/** Opcode 0x0f 0x01 /0. */
1190#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1191FNIEMOP_DEF(iemOp_Grp7_vmxoff)
1192{
1193 IEMOP_MNEMONIC(vmxoff, "vmxoff");
1194 IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
1195 IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
1196 IEMOP_HLP_DONE_DECODING();
1197 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
1198}
1199#else
1200FNIEMOP_DEF(iemOp_Grp7_vmxoff)
1201{
1202 IEMOP_BITCH_ABOUT_STUB();
1203 return IEMOP_RAISE_INVALID_OPCODE();
1204}
1205#endif
1206
1207
/** Opcode 0x0f 0x01 /1 (memory form) - sidt: store the IDT register to
 *  memory; actual work done by iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1224
1225
/** Opcode 0x0f 0x01 /1, register form 0xc8 - monitor: set up an address
 *  monitor; the effective segment of the implicit address is passed along. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
1233
1234
/** Opcode 0x0f 0x01 /1, register form 0xc9 - mwait: wait on a monitored
 *  address; fully handled by iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
1242
1243
/** Opcode 0x0f 0x01 /2 (memory form) - lgdt: load the GDT register from a
 *  memory pseudo-descriptor; the effective operand size is forwarded. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1260
1261
/** Opcode 0x0f 0x01 0xd0 - xgetbv: read an extended control register;
 *  \#UD when the guest CPU profile lacks XSAVE/XRSTOR. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1276
1277
/** Opcode 0x0f 0x01 0xd1 - xsetbv: write an extended control register;
 *  \#UD when the guest CPU profile lacks XSAVE/XRSTOR. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1292
1293
/** Opcode 0x0f 0x01 /3 (memory form) - lidt: load the IDT register from a
 *  memory pseudo-descriptor; in 64-bit mode the operand size is forced to
 *  64-bit regardless of prefixes. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1312
1313
1314/** Opcode 0x0f 0x01 0xd8. */
1315#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1316FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
1317{
1318 IEMOP_MNEMONIC(vmrun, "vmrun");
1319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1320 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
1321}
1322#else
1323FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
1324#endif
1325
/** Opcode 0x0f 0x01 0xd9 - AMD vmmcall hypercall; always forwarded to the
 *  C implementation (see note below). */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm will raise an #UD.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}
1338
1339/** Opcode 0x0f 0x01 0xda. */
1340#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1341FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
1342{
1343 IEMOP_MNEMONIC(vmload, "vmload");
1344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1345 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
1346}
1347#else
1348FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
1349#endif
1350
1351
/** Opcode 0x0f 0x01 0xdb - AMD SVM vmsave; \#UD stub when nested SVM support
 *  is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif
1363
1364
/** Opcode 0x0f 0x01 0xdc - AMD SVM stgi (set global interrupt flag); \#UD
 *  stub when nested SVM support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif
1376
1377
/** Opcode 0x0f 0x01 0xdd - AMD SVM clgi (clear global interrupt flag); \#UD
 *  stub when nested SVM support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif
1389
1390
/** Opcode 0x0f 0x01 0xdf - AMD SVM invlpga; \#UD stub when nested SVM
 *  support is not compiled in. (Listed before 0xde/skinit in this file.) */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif
1402
1403
/** Opcode 0x0f 0x01 0xde - AMD SVM skinit; \#UD stub when nested SVM support
 *  is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif
1415
1416
/** Opcode 0x0f 0x01 /4 - smsw: store the machine status word (low CR0 bits)
 *  to a register or a 16-bit memory word. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: whole work deferred to the C implementation. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1439
1440
/** Opcode 0x0f 0x01 /6 - lmsw: load the machine status word from a register
 *  or a 16-bit memory word; iemCImpl_lmsw also receives the guest address
 *  (NIL for the register form). */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1); /* no memory operand */
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1471
1472
/** Opcode 0x0f 0x01 /7 (memory form) - invlpg: invalidate a TLB entry for
 *  the given address; actual work done by iemCImpl_invlpg.
 *  NOTE(review): unlike the sibling decoders, the lock-prefix check here runs
 *  before the effective address calculation - looks intentional/historical,
 *  but worth confirming against the other /7 forms. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1486
1487
/** Opcode 0x0f 0x01 /7, register form 0xf8 - swapgs: exchange GS base with
 *  MSR_KERNEL_GS_BASE; 64-bit mode only. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1496
1497
/** Opcode 0x0f 0x01 /7, register form 0xf9 - rdtscp: read TSC and the
 *  TSC_AUX MSR; feature/permission checks are done in iemCImpl_rdtscp. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}
1505
1506
/**
 * Group 7 jump table, memory variant, indexed by the ModR/M reg field
 * (/0../7). Register forms are handled by the switch in iemOp_Grp7.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,        /* /0 */
    iemOp_Grp7_sidt,        /* /1 */
    iemOp_Grp7_lgdt,        /* /2 */
    iemOp_Grp7_lidt,        /* /3 */
    iemOp_Grp7_smsw,        /* /4 */
    iemOp_InvalidWithRM,    /* /5 */
    iemOp_Grp7_lmsw,        /* /6 */
    iemOp_Grp7_invlpg       /* /7 */
};
1521
1522
/** Opcode 0x0f 0x01 - group 7: memory forms dispatch through the table
 *  above; register forms encode special instructions selected by the reg and
 *  rm fields of the ModR/M byte. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: /* 0xc1..0xc4: Intel VMX instructions. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* 0xc8/0xc9: monitor/mwait. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* 0xd0/0xd1: xgetbv/xsetbv. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* 0xd8..0xdf: AMD SVM instructions (all rm values covered). */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* smsw also has a register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* lmsw also has a register form. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* 0xf8/0xf9: swapgs/rdtscp. */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1592
/** Opcode 0x0f 0x02 and 0x0f 0x03 - common worker for lar (fIsLar=true) and
 *  lsl (fIsLar=false). Fetches the 16-bit selector operand (register or
 *  memory) and calls iemCImpl_LarLsl_u16/u64 with the destination GPR.
 *  Not valid in real or V86 mode. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            /* 32-bit and 64-bit share the u64 worker; it references the full
               64-bit destination register. */
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1686
1687
1688
/** Opcode 0x0f 0x02 - lar Gv,Ew: load access rights; thin wrapper over the
 *  shared lar/lsl worker. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1695
1696
/** Opcode 0x0f 0x03 - lsl Gv,Ew: load segment limit; thin wrapper over the
 *  shared lar/lsl worker. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1703
1704
/** Opcode 0x0f 0x05 - syscall; fully handled by iemCImpl_syscall. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL   */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1712
1713
/** Opcode 0x0f 0x06 - clts: clear the CR0.TS flag; fully handled by
 *  iemCImpl_clts. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1721
1722
/** Opcode 0x0f 0x07 - sysret; fully handled by iemCImpl_sysret. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL   */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1730
1731
/** Opcode 0x0f 0x08 - invd: invalidate caches without write-back; fully
 *  handled by iemCImpl_invd. */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}
1740
1741
/** Opcode 0x0f 0x09 - wbinvd: write back and invalidate caches; fully
 *  handled by iemCImpl_wbinvd. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}
1750
1751
/** Opcode 0x0f 0x0b - ud2: the official undefined-opcode instruction,
 *  always raises \#UD. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1758
/** Opcode 0x0f 0x0d - AMD prefetch group (GrpP): decodes as prefetch(w) when
 *  the 3DNow! prefetch feature is present; register forms are invalid.
 *  Currently emulated as a NOP after effective-address decoding. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Only memory operands are defined for this group. */
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the operand to keep the instruction stream consistent, then
       do nothing with it. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1799
1800
/** Opcode 0x0f 0x0e - femms (AMD): fast exit from MMX state back to FPU
 *  mode; only touches FPU/MMX mode state. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1816
1817
/** Opcode 0x0f 0x0f - 3DNow! escape: the real opcode is the imm8 suffix
 *  byte, dispatched by iemOp_3DNowDispatcher; \#UD when 3DNow! is absent. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
1836
1837
/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 *
 * movups Vps,Wps: load 16 unaligned packed singles into an XMM register.
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: full 128-bit XMM copy, reg <- rm.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: fetch the full 128 bits from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1888
1889
/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * movupd Vpd,Wpd: load 16 unaligned packed doubles into an XMM register.
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: full 128-bit XMM copy, reg <- rm.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: fetch the full 128 bits from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1939
1940
/**
 * @opcode      0x10
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 *
 * movss Vss,Wss: load a scalar single; the register form only replaces the
 * low dword, the memory form zero-extends to the full 128 bits.
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: only the low dword is copied.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t,                  uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: load the dword and zero-extend to 128 bits.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1993
1994
/**
 * @opcode      0x10
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * movsd Vsd,Wsd: load a scalar double; the register form only replaces the
 * low qword, the memory form zero-extends to the full 128 bits.
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: only the low qword is copied.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t,                  uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: load the qword and zero-extend to 128 bits.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2047
2048
2049/**
2050 * @opcode 0x11
2051 * @oppfx none
2052 * @opcpuid sse
2053 * @opgroup og_sse_simdfp_datamove
2054 * @opxcpttype 4UA
2055 * @optest op1=1 op2=2 -> op1=2
2056 * @optest op1=0 op2=-42 -> op1=-42
2057 */
FNIEMOP_DEF(iemOp_movups_Wps_Vps)
{
    /* movups xmm2/m128, xmm1 (0F 11): store 128 bits, no alignment requirement. */
    IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: straight 128-bit XMM-to-XMM copy.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register: unaligned store, hence the plain
         * IEM_MC_STORE_MEM_U128 (no SSE alignment check).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2098
2099
2100/**
2101 * @opcode 0x11
2102 * @oppfx 0x66
2103 * @opcpuid sse2
2104 * @opgroup og_sse2_pcksclr_datamove
2105 * @opxcpttype 4UA
2106 * @optest op1=1 op2=2 -> op1=2
2107 * @optest op1=0 op2=-42 -> op1=-42
2108 */
FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
{
    /* movupd xmm2/m128, xmm1 (66 0F 11): SSE2 unaligned 128-bit store. */
    IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: straight 128-bit XMM-to-XMM copy.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register: unaligned store (no SSE alignment check).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2149
2150
2151/**
2152 * @opcode 0x11
2153 * @oppfx 0xf3
2154 * @opcpuid sse
2155 * @opgroup og_sse_simdfp_datamove
2156 * @opxcpttype 5
2157 * @optest op1=1 op2=2 -> op1=2
2158 * @optest op1=0 op2=-22 -> op1=-22
2159 */
FNIEMOP_DEF(iemOp_movss_Wss_Vss)
{
    /* movss xmm2/m32, xmm1 (F3 0F 11): store scalar single-precision value. */
    IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: only the low dword of the destination XMM
         * register is written; the upper 96 bits are preserved.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t,                  uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register: plain 32-bit store of the low dword.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2203
2204
2205/**
2206 * @opcode 0x11
2207 * @oppfx 0xf2
2208 * @opcpuid sse2
2209 * @opgroup og_sse2_pcksclr_datamove
2210 * @opxcpttype 5
2211 * @optest op1=1 op2=2 -> op1=2
2212 * @optest op1=0 op2=-42 -> op1=-42
2213 */
FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
{
    /* movsd xmm2/m64, xmm1 (F2 0F 11): store scalar double-precision value. */
    IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: only the low qword of the destination XMM
         * register is written; the high qword is preserved.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t,                  uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register: plain 64-bit store of the low qword.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2257
2258
/**
 * 0F 12: two instructions share the opcode, selected by the mod field:
 * register form is MOVHLPS, memory form is MOVLPS.
 */
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        /* movhlps: high qword of source -> low qword of destination. */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t,                  uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__vmovhlps
         */
        /* movlps: 64-bit load into the low qword; high qword preserved. */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2321
2322
2323/**
2324 * @opcode 0x12
2325 * @opcodesub !11 mr/reg
2326 * @oppfx 0x66
2327 * @opcpuid sse2
2328 * @opgroup og_sse2_pcksclr_datamove
2329 * @opxcpttype 5
2330 * @optest op1=1 op2=2 -> op1=2
2331 * @optest op1=0 op2=-42 -> op1=-42
2332 */
FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
{
    /* movlpd xmm, m64 (66 0F 12): memory form only; register form is #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* Load 64 bits into the low qword; high qword is preserved. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2369
2370
2371/**
2372 * @opcode 0x12
2373 * @oppfx 0xf3
2374 * @opcpuid sse3
2375 * @opgroup og_sse3_pcksclr_datamove
2376 * @opxcpttype 4
2377 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2378 * op1=0x00000002000000020000000100000001
2379 */
FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
{
    /* movsldup xmm1, xmm2/m128 (F3 0F 12): duplicate even-indexed singles. */
    IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: worker does the shuffle on two XMM refs.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U,                 puDst, 0);
        IEM_MC_ARG(PCRTUINT128U,                puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: aligned 128-bit fetch into a local, then the
         * same worker is called with a ref to the local.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U,                 puDst,       0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2429
2430
2431/**
2432 * @opcode 0x12
2433 * @oppfx 0xf2
2434 * @opcpuid sse3
2435 * @opgroup og_sse3_pcksclr_datamove
2436 * @opxcpttype 5
2437 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2438 * op1=0x22222222111111112222222211111111
2439 */
FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
{
    /* movddup xmm1, xmm2/m64 (F2 0F 12): duplicate the low qword into both halves. */
    IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: the worker takes the source qword by value.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U,                 puDst, 0);
        IEM_MC_ARG(uint64_t,                    uSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: only 64 bits are fetched (no 128-bit alignment
         * requirement for this instruction).
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U,                 puDst, 0);
        IEM_MC_ARG(uint64_t,                    uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2488
2489
2490/**
2491 * @opcode 0x13
2492 * @opcodesub !11 mr/reg
2493 * @oppfx none
2494 * @opcpuid sse
2495 * @opgroup og_sse_simdfp_datamove
2496 * @opxcpttype 5
2497 * @optest op1=1 op2=2 -> op1=2
2498 * @optest op1=0 op2=-42 -> op1=-42
2499 */
FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
{
    /* movlps m64, xmm (0F 13): memory form only; register form is #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* Store the low qword of the XMM register to memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2536
2537
2538/**
2539 * @opcode 0x13
2540 * @opcodesub !11 mr/reg
2541 * @oppfx 0x66
2542 * @opcpuid sse2
2543 * @opgroup og_sse2_pcksclr_datamove
2544 * @opxcpttype 5
2545 * @optest op1=1 op2=2 -> op1=2
2546 * @optest op1=0 op2=-42 -> op1=-42
2547 */
FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
{
    /* movlpd m64, xmm (66 0F 13): memory form only; register form is #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        /* Store the low qword of the XMM register to memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2583
2584
2585/**
2586 * @opmnemonic udf30f13
2587 * @opcode 0x13
2588 * @oppfx 0xf3
2589 * @opunused intel-modrm
2590 * @opcpuid sse
2591 * @optest ->
2592 * @opdone
2593 */
2594
2595/**
2596 * @opmnemonic udf20f13
2597 * @opcode 0x13
2598 * @oppfx 0xf2
2599 * @opunused intel-modrm
2600 * @opcpuid sse
2601 * @optest ->
2602 * @opdone
2603 */
2604
2605/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
{
    /* unpcklps xmm1, xmm2/m128 (0F 14): interleave low singles; shared SSE worker. */
    IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
}
2611
2612
2613/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
{
    /* unpcklpd xmm1, xmm2/m128 (66 0F 14): interleave low doubles; shared SSE2 worker. */
    IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
}
2619
2620
2621/**
2622 * @opdone
2623 * @opmnemonic udf30f14
2624 * @opcode 0x14
2625 * @oppfx 0xf3
2626 * @opunused intel-modrm
2627 * @opcpuid sse
2628 * @optest ->
2629 * @opdone
2630 */
2631
2632/**
2633 * @opmnemonic udf20f14
2634 * @opcode 0x14
2635 * @oppfx 0xf2
2636 * @opunused intel-modrm
2637 * @opcpuid sse
2638 * @optest ->
2639 * @opdone
2640 */
2641
2642/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
{
    /* unpckhps xmm1, xmm2/m128 (0F 15): interleave high singles; shared SSE worker. */
    IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
}
2648
2649
2650/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
{
    /* unpckhpd xmm1, xmm2/m128 (66 0F 15): interleave high doubles; shared SSE2 worker. */
    IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
}
2656
2657
2658/* Opcode 0xf3 0x0f 0x15 - invalid */
2659/* Opcode 0xf2 0x0f 0x15 - invalid */
2660
2661/**
2662 * @opdone
2663 * @opmnemonic udf30f15
2664 * @opcode 0x15
2665 * @oppfx 0xf3
2666 * @opunused intel-modrm
2667 * @opcpuid sse
2668 * @optest ->
2669 * @opdone
2670 */
2671
2672/**
2673 * @opmnemonic udf20f15
2674 * @opcode 0x15
2675 * @oppfx 0xf2
2676 * @opunused intel-modrm
2677 * @opcpuid sse
2678 * @optest ->
2679 * @opdone
2680 */
2681
/**
 * 0F 16: two instructions share the opcode, selected by the mod field:
 * register form is MOVLHPS, memory form is MOVHPS.
 */
FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        /* movlhps: low qword of source -> high qword of destination. */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t,                  uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        /* movhps: 64-bit load into the high qword; low qword preserved. */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2744
2745
2746/**
2747 * @opcode 0x16
2748 * @opcodesub !11 mr/reg
2749 * @oppfx 0x66
2750 * @opcpuid sse2
2751 * @opgroup og_sse2_pcksclr_datamove
2752 * @opxcpttype 5
2753 * @optest op1=1 op2=2 -> op1=2
2754 * @optest op1=0 op2=-42 -> op1=-42
2755 */
FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
{
    /* movhpd xmm, m64 (66 0F 16): memory form only; register form is #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
        /* Load 64 bits into the high qword; low qword is preserved. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2791
2792
2793/**
2794 * @opcode 0x16
2795 * @oppfx 0xf3
2796 * @opcpuid sse3
2797 * @opgroup og_sse3_pcksclr_datamove
2798 * @opxcpttype 4
2799 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2800 * op1=0x00000002000000020000000100000001
2801 */
FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
{
    /* movshdup xmm1, xmm2/m128 (F3 0F 16): duplicate odd-indexed singles. */
    IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: worker does the shuffle on two XMM refs.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U,                 puDst, 0);
        IEM_MC_ARG(PCRTUINT128U,                puSrc, 1);

        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: aligned 128-bit fetch into a local, then the
         * same worker is called with a ref to the local.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U,                 puDst,       0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2851
/**
 * @opdone
 * @opmnemonic  udf20f16
 * @opcode      0x16
 * @oppfx       0xf2
 * @opunused    intel-modrm
 * @opcpuid     sse
 * @optest      ->
 * @opdone
 */
2862
2863
2864/**
2865 * @opcode 0x17
2866 * @opcodesub !11 mr/reg
2867 * @oppfx none
2868 * @opcpuid sse
2869 * @opgroup og_sse_simdfp_datamove
2870 * @opxcpttype 5
2871 * @optest op1=1 op2=2 -> op1=2
2872 * @optest op1=0 op2=-42 -> op1=-42
2873 */
FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
{
    /* movhps m64, xmm (0F 17): memory form only; register form is #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        /* Store the high qword of the XMM register to memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic  ud0f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
2910
2911
2912/**
2913 * @opcode 0x17
2914 * @opcodesub !11 mr/reg
2915 * @oppfx 0x66
2916 * @opcpuid sse2
2917 * @opgroup og_sse2_pcksclr_datamove
2918 * @opxcpttype 5
2919 * @optest op1=1 op2=2 -> op1=2
2920 * @optest op1=0 op2=-42 -> op1=-42
2921 */
2922FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2923{
2924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2925 if (IEM_IS_MODRM_MEM_MODE(bRm))
2926 {
2927 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2928
2929 IEM_MC_BEGIN(0, 2);
2930 IEM_MC_LOCAL(uint64_t, uSrc);
2931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2932
2933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2935 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2936 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2937
2938 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2939 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2940
2941 IEM_MC_ADVANCE_RIP();
2942 IEM_MC_END();
2943 return VINF_SUCCESS;
2944 }
2945
2946 /**
2947 * @opdone
2948 * @opmnemonic ud660f17m3
2949 * @opcode 0x17
2950 * @opcodesub 11 mr/reg
2951 * @oppfx 0x66
2952 * @opunused immediate
2953 * @opcpuid sse
2954 * @optest ->
2955 */
2956 return IEMOP_RAISE_INVALID_OPCODE();
2957}
2958
2959
2960/**
2961 * @opdone
2962 * @opmnemonic udf30f17
2963 * @opcode 0x17
2964 * @oppfx 0xf3
2965 * @opunused intel-modrm
2966 * @opcpuid sse
2967 * @optest ->
2968 * @opdone
2969 */
2970
2971/**
2972 * @opmnemonic udf20f17
2973 * @opcode 0x17
2974 * @oppfx 0xf2
2975 * @opunused intel-modrm
2976 * @opcpuid sse
2977 * @optest ->
2978 * @opdone
2979 */
2980
2981
2982/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    /* 0F 18 /0../7: prefetch hints (group 16); memory operand required. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Address is decoded (so faults on decoding are correct) but the
           prefetch itself is not performed - architecturally a hint only. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
3014
3015
3016/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    /* 0F 19..1F: multi-byte NOP with a ModRM operand. */
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* The effective address is still calculated so addressing faults
           behave correctly, but nothing is accessed. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3041
3042
3043/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* 0F 20: mov r, crX - read a control register into a GPR. */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; everything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
}
3074
3075
3076/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    /* 0F 21: mov r, drX - read a debug register into a GPR. */
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R on the debug register operand is invalid (#UD). */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}
3089
3090
3091/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* 0F 22: mov crX, r - write a GPR into a control register. */
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; everything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
}
3122
3123
3124/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    /* 0F 23: mov drX, r - write a GPR into a debug register. */
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R on the debug register operand is invalid (#UD). */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}
3137
3138
3139/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    /* 0F 24: mov r, trX - read a test register (pre-Pentium only). */
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Test registers were dropped with the Pentium; #UD on newer targets. */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}
3152
3153
3154/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    /* 0F 26: mov trX, r - write a test register (pre-Pentium only). */
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Test registers were dropped with the Pentium; #UD on newer targets. */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}
3167
3168
3169/**
3170 * @opcode 0x28
3171 * @oppfx none
3172 * @opcpuid sse
3173 * @opgroup og_sse_simdfp_datamove
3174 * @opxcpttype 1
3175 * @optest op1=1 op2=2 -> op1=2
3176 * @optest op1=0 op2=-42 -> op1=-42
3177 */
FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
{
    /* movaps xmm1, xmm2/m128 (0F 28): aligned 128-bit load/copy. */
    IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: straight 128-bit XMM-to-XMM copy.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: alignment-checked fetch
         * (IEM_MC_FETCH_MEM_U128_ALIGN_SSE), unlike movups.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U,                uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3218
/**
 * @opcode      0x28
 * @oppfx       66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * movapd Vpd,Wpd: identical data movement to movaps, but gated on SSE2.
 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Aligned fetch: movapd faults on a misaligned memory operand. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3268
3269/* Opcode 0xf3 0x0f 0x28 - invalid */
3270/* Opcode 0xf2 0x0f 0x28 - invalid */
3271
/**
 * @opcode      0x29
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * movaps Wps,Vps: store form - XMM register into register/aligned memory.
 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Note the reversed operand order vs. the 0x28 load form: rm is the destination. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* Only reading XMM state here. */

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3321
/**
 * @opcode      0x29
 * @oppfx       66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * movapd Wpd,Vpd: store form - XMM register into register/aligned memory (SSE2).
 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Store form: rm is the destination, reg the source. */
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); /* Only reading XMM state here. */

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3371
3372/* Opcode 0xf3 0x0f 0x29 - invalid */
3373/* Opcode 0xf2 0x0f 0x29 - invalid */
3374
3375
/* 0x0f 0x2a conversion instructions: not implemented yet (stubs raise a
   not-implemented status); //NEXT marks them as queued for implementation. */
/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
3384
3385
/**
 * @opcode      0x2b
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse1_cachect
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * movntps Mps,Vps: non-temporal aligned store.  The cache hint has no
 * architectural side effect, so this is emulated as a plain aligned store.
 */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
3425
/**
 * @opcode      0x2b
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_cachect
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * movntpd Mpd,Vpd: non-temporal aligned store (SSE2); hint not modelled,
 * emulated as a plain aligned store.
 */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
3465/* Opcode 0xf3 0x0f 0x2b - invalid */
3466/* Opcode 0xf2 0x0f 0x2b - invalid */
3467
3468
/* 0x0f 0x2c..0x2f: conversion and scalar compare instructions - unimplemented
   stubs; "// NEXT" marks those queued for implementation. */
/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - comiss Vss, Wss */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */
3500
/** Opcode 0x0f 0x30 - wrmsr.  Fully deferred to the C implementation. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
3508
3509
/** Opcode 0x0f 0x31 - rdtsc.  Fully deferred to the C implementation. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
3517
3518
/** Opcode 0x0f 0x32 - rdmsr.  (Comment previously said 0x33; RDMSR is 0F 32
 *  per the Intel/AMD two-byte opcode map.)  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
3526
3527
/** Opcode 0x0f 0x33 - rdpmc.  (Comment previously said 0x34; RDPMC is 0F 33
 *  per the Intel/AMD two-byte opcode map.)  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_rdpmc)
{
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
}
3535
3536
/** Opcode 0x0f 0x34 - sysenter.  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_sysenter)
{
    IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
}

/** Opcode 0x0f 0x35 - sysexit.  The effective operand size decides whether
 *  the 64-bit (REX.W) or 32-bit return form is used by the C implementation. */
FNIEMOP_DEF(iemOp_sysexit)
{
    IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
}
3552
/** Opcode 0x0f 0x37 - getsec (SMX).  Unimplemented stub. */
FNIEMOP_STUB(iemOp_getsec);
3555
3556
/** Opcode 0x0f 0x38 - three-byte escape.
 *  Dispatches on the third opcode byte; the table is indexed by
 *  (opcode byte * 4 + mandatory-prefix index). */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
3568
3569
/** Opcode 0x0f 0x3a - three-byte escape.
 *  Dispatches on the third opcode byte; the table is indexed by
 *  (opcode byte * 4 + mandatory-prefix index). */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
3581
3582
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * Expands register and memory forms for 16/32/64-bit effective operand sizes.
 * Two architectural details worth noting:
 *  - In the 32-bit case the IEM_MC_ELSE branch clears the high dword of the
 *    destination even when the condition is false, matching the unconditional
 *    zero-extension of 32-bit GPR writes in 64-bit mode.
 *  - The memory operand is always fetched before the condition is evaluated,
 *    so a fault on the source occurs regardless of the condition, as on real
 *    hardware.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
3683
3684
3685
/** Opcode 0x0f 0x40 - cmovo: move if OF=1. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - cmovno: move if OF=0. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - cmovc/cmovb: move if CF=1. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - cmovnc/cmovae: move if CF=0. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - cmove/cmovz: move if ZF=1. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - cmovne/cmovnz: move if ZF=0. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - cmovbe: move if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - cmovnbe/cmova: move if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
3748
3749
/** Opcode 0x0f 0x48 - cmovs: move if SF=1. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - cmovns: move if SF=0. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - cmovp: move if PF=1. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - cmovnp: move if PF=0. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - cmovl: move if SF != OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - cmovnl/cmovge: move if SF == OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - cmovle: move if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - cmovnle/cmovg: move if ZF=0 and SF == OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
3812
3813#undef CMOV_X
3814
/* 0x0f 0x50..0x53: mask extraction and approximate/square-root math -
   unimplemented stubs. */
/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */

/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);

/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */
3844
3845
/** Opcode 0x0f 0x54 - andps Vps, Wps
 * Bitwise AND; shares the 128-bit pand worker since the bit pattern result is
 * identical.
 * NOTE(review): routed through the SSE2 common worker although andps is an
 * SSE instruction - the CPUID gate may be stricter than hardware; confirm. */
FNIEMOP_DEF(iemOp_andps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}


/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd
 * Bitwise AND (SSE2); same pand worker as andps. */
FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}
3860
3861
3862/* Opcode 0xf3 0x0f 0x54 - invalid */
3863/* Opcode 0xf2 0x0f 0x54 - invalid */
3864
3865
/** Opcode 0x0f 0x55 - andnps Vps, Wps
 * Bitwise AND-NOT (~dst & src); shares the 128-bit pandn worker. */
FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}


/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd
 * Bitwise AND-NOT (SSE2); same pandn worker as andnps. */
FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}
3880
3881
3882/* Opcode 0xf3 0x0f 0x55 - invalid */
3883/* Opcode 0xf2 0x0f 0x55 - invalid */
3884
3885
/** Opcode 0x0f 0x56 - orps Vps, Wps
 * Bitwise OR; shares the 128-bit por worker. */
FNIEMOP_DEF(iemOp_orps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd
 * Bitwise OR (SSE2); same por worker as orps. */
FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}
3900
3901
3902/* Opcode 0xf3 0x0f 0x56 - invalid */
3903/* Opcode 0xf2 0x0f 0x56 - invalid */
3904
3905
/** Opcode 0x0f 0x57 - xorps Vps, Wps
 * Bitwise XOR; shares the 128-bit pxor worker. */
FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}


/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd
 * Bitwise XOR (SSE2); same pxor worker as xorps. */
FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}
3920
3921
3922/* Opcode 0xf3 0x0f 0x57 - invalid */
3923/* Opcode 0xf2 0x0f 0x57 - invalid */
3924
/** Opcode 0x0f 0x58 - addps Vps, Wps
 * Packed single-precision FP add via the SSE FP common worker (handles MXCSR). */
FNIEMOP_DEF(iemOp_addps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
}


/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd
 * Packed double-precision FP add via the SSE2 FP common worker. */
FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
}


/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
FNIEMOP_STUB(iemOp_addss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
3945
3946
/** Opcode 0x0f 0x59 - mulps Vps, Wps
 * Packed single-precision FP multiply via the SSE FP common worker. */
FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
}


/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd
 * Packed double-precision FP multiply via the SSE2 FP common worker. */
FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
}


/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);

/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);

/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */
3984
3985
/** Opcode 0x0f 0x5c - subps Vps, Wps
 * Packed single-precision FP subtract via the SSE FP common worker. */
FNIEMOP_DEF(iemOp_subps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
}


/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd
 * Packed double-precision FP subtract via the SSE2 FP common worker. */
FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
}


/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
FNIEMOP_STUB(iemOp_subss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
4006
4007
/** Opcode 0x0f 0x5d - minps Vps, Wps
 * Packed single-precision FP minimum via the SSE FP common worker. */
FNIEMOP_DEF(iemOp_minps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
}


/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd
 * Packed double-precision FP minimum via the SSE2 FP common worker. */
FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
}


/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
FNIEMOP_STUB(iemOp_minss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
4028
4029
/** Opcode 0x0f 0x5e - divps Vps, Wps
 * Packed single-precision FP divide via the SSE FP common worker. */
FNIEMOP_DEF(iemOp_divps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
}


/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd
 * Packed double-precision FP divide via the SSE2 FP common worker. */
FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
}


/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
FNIEMOP_STUB(iemOp_divss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
4050
4051
/** Opcode 0x0f 0x5f - maxps Vps, Wps
 * Packed single-precision FP maximum via the SSE FP common worker. */
FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
}


/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd
 * Packed double-precision FP maximum via the SSE2 FP common worker. */
FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
}


/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
4072
4073
/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd
 * Interleaves the low halves of the operands (byte granularity); MMX form. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
}


/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, W
 * SSE2 form: interleaves the low 64 bits of the XMM operands (byte granularity). */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
}


/* Opcode 0xf3 0x0f 0x60 - invalid */


/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd
 * Interleaves the low halves (word granularity); MMX form. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
}


/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx
 * SSE2 form: interleaves the low 64 bits (word granularity). */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
}


/* Opcode 0xf3 0x0f 0x61 - invalid */


/** Opcode 0x0f 0x62 - punpckldq Pq, Qd
 * Interleaves the low halves (dword granularity); MMX form. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
}


/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx
 * SSE2 form: interleaves the low 64 bits (dword granularity). */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
}


/* Opcode 0xf3 0x0f 0x62 - invalid */
4130
4131
4132
/** Opcode 0x0f 0x63 - packsswb Pq, Qq
 * Packs words to bytes with signed saturation; MMX form.
 * NOTE(review): the mnemonic declaration uses Qd while the function name and
 * opcode doc say Qq - cosmetic decoder metadata only, the worker is 64-bit. */
FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
}


/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx
 * SSE2 form of the signed word-to-byte saturating pack. */
FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
}


/* Opcode 0xf3 0x0f 0x63 - invalid */


/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq
 * Signed greater-than compare, byte elements; MMX form. */
FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
}


/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx
 * Signed greater-than compare, byte elements; SSE2 form. */
FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
}


/* Opcode 0xf3 0x0f 0x64 - invalid */


/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq
 * Signed greater-than compare, word elements; MMX form. */
FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
}


/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx
 * Signed greater-than compare, word elements; SSE2 form. */
FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
}


/* Opcode 0xf3 0x0f 0x65 - invalid */


/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq
 * Signed greater-than compare, dword elements; MMX form. */
FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
}


/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx
 * Signed greater-than compare, dword elements; SSE2 form. */
FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
}


/* Opcode 0xf3 0x0f 0x66 - invalid */


/** Opcode 0x0f 0x67 - packuswb Pq, Qq
 * Packs words to bytes with unsigned saturation; MMX form.
 * NOTE(review): same Qd/Qq mnemonic mismatch as packsswb above - cosmetic. */
FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
}


/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx
 * SSE2 form of the unsigned word-to-byte saturating pack. */
FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
}


/* Opcode 0xf3 0x0f 0x67 - invalid */
4226
4227
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
 * MMX interleave high-order bytes of the two operands.
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}
4237
4238
/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx
 * SSE2 interleave high-order bytes of the two operands. */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
}
4245
4246
4247/* Opcode 0xf3 0x0f 0x68 - invalid */
4248
4249
/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
 * MMX interleave high-order words of the two operands.
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
}
4259
4260
/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx
 * SSE2 interleave high-order words of the two operands.  (The three-operand
 * "Vx, Hx, Wx" form belongs to the VEX mirror of this opcode, not here.) */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);

}
4268
4269
4270/* Opcode 0xf3 0x0f 0x69 - invalid */
4271
4272
/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
 * MMX interleave high-order dwords of the two operands.
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
}
4282
4283
/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx
 * SSE2 interleave high-order dwords of the two operands. */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
}
4290
4291
4292/* Opcode 0xf3 0x0f 0x6a - invalid */
4293
4294
/** Opcode 0x0f 0x6b - packssdw Pq, Qd
 * MMX pack signed dwords into signed words with saturation. */
FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
}
4301
4302
/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx
 * SSE2 pack signed dwords into signed words with saturation. */
FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
}
4309
4310
4311/* Opcode 0xf3 0x0f 0x6b - invalid */
4312
4313
4314/* Opcode 0x0f 0x6c - invalid */
4315
4316
/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx
 * SSE2 interleave low-order qwords; only the low half of the memory/register
 * source is needed, hence the LowLow worker. */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
}
4323
4324
4325/* Opcode 0xf3 0x0f 0x6c - invalid */
4326/* Opcode 0xf2 0x0f 0x6c - invalid */
4327
4328
4329/* Opcode 0x0f 0x6d - invalid */
4330
4331
/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx
 * SSE2 interleave high-order qwords of the two operands. */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
}
4338
4339
4340/* Opcode 0xf3 0x0f 0x6d - invalid */
4341
4342
/** Opcode 0x0f 0x6e - movd/movq Pd/Pq, Ed/Eq.
 * Moves a GPR or memory dword/qword into an MMX register; REX.W selects the
 * 64-bit (movq) form, otherwise the 32-bit source is zero-extended (movd). */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg: dword is zero-extended to the full 64-bit MMX register. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem]: dword load, zero-extending store. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4450
/** Opcode 0x66 0x0f 0x6e - movd/movq Vd/Vq, Ed/Eq.
 * Moves a GPR or memory dword/qword into the low part of an XMM register,
 * zero-extending to 128 bits; REX.W selects the 64-bit (movq) form. */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32: dword is stored zero-extended to 128 bits. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4554
4555/* Opcode 0xf3 0x0f 0x6e - invalid */
4556
4557
4558/**
4559 * @opcode 0x6f
4560 * @oppfx none
4561 * @opcpuid mmx
4562 * @opgroup og_mmx_datamove
4563 * @opxcpttype 5
4564 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4565 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4566 */
4567FNIEMOP_DEF(iemOp_movq_Pq_Qq)
4568{
4569 IEMOP_MNEMONIC2(RM, MOVD, movd, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4570 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4571 if (IEM_IS_MODRM_REG_MODE(bRm))
4572 {
4573 /*
4574 * Register, register.
4575 */
4576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4577 IEM_MC_BEGIN(0, 1);
4578 IEM_MC_LOCAL(uint64_t, u64Tmp);
4579
4580 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4581 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4582
4583 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
4584 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4585 IEM_MC_FPU_TO_MMX_MODE();
4586
4587 IEM_MC_ADVANCE_RIP();
4588 IEM_MC_END();
4589 }
4590 else
4591 {
4592 /*
4593 * Register, memory.
4594 */
4595 IEM_MC_BEGIN(0, 2);
4596 IEM_MC_LOCAL(uint64_t, u64Tmp);
4597 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4598
4599 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4600 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4601 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4602 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4603
4604 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4605 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4606 IEM_MC_FPU_TO_MMX_MODE();
4607
4608 IEM_MC_ADVANCE_RIP();
4609 IEM_MC_END();
4610 }
4611 return VINF_SUCCESS;
4612}
4613
/**
 * @opcode      0x6f
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
{
    /* Aligned 128-bit load/copy; memory form enforces 16-byte alignment (#GP otherwise). */
    IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4665
/**
 * @opcode      0x6f
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_simdint_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
{
    /* Unaligned 128-bit load/copy; unlike movdqa no alignment check on the memory form. */
    IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4714
4715
/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib
 * MMX word shuffle by immediate control byte (requires SSE or AMD MMX
 * extensions, see the XCPT_CHECK_SSE_OR_MMXEXT raise below). */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         * Note: the immediate byte follows the ModR/M displacement, hence it is
         * fetched after IEM_MC_CALC_RM_EFF_ADDR.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4772
4773
/**
 * Common worker for SSE2 instructions on the forms:
 *     pshufd  xmm1, xmm2/mem128, imm8
 *     pshufhw xmm1, xmm2/mem128, imm8
 *     pshuflw xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @param   pfnWorker   The instruction-specific u128 shuffle implementation
 *                      invoked with (dst, src, imm8).
 */
FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         * The imm8 is fetched after the ModR/M displacement bytes.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4833
4834
/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib
 * SSE2 dword shuffle by immediate control byte. */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
}
4841
4842
/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib
 * SSE2 shuffle of the high quadword's words by immediate control byte. */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
}
4849
4850
/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib
 * SSE2 shuffle of the low quadword's words by immediate control byte. */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
}
4857
4858
/**
 * Common worker for MMX instructions of the form:
 *    psrlw mm, imm8
 *    psraw mm, imm8
 *    psllw mm, imm8
 *    psrld mm, imm8
 *    psrad mm, imm8
 *    pslld mm, imm8
 *    psrlq mm, imm8
 *    psllq mm, imm8
 *
 * @param   bRm     The ModR/M byte (already fetched by the group dispatcher;
 *                  the register form is expected, see note below).
 * @param   pfnU64  The u64 shift implementation, invoked with (dst, imm8).
 */
FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory not supported.
         */
        /// @todo Caller already enforced register mode?!
    }
    return VINF_SUCCESS;
}
4902
4903
/**
 * Common worker for SSE2 instructions of the form:
 *    psrlw xmm, imm8
 *    psraw xmm, imm8
 *    psllw xmm, imm8
 *    psrld xmm, imm8
 *    psrad xmm, imm8
 *    pslld xmm, imm8
 *    psrlq xmm, imm8
 *    psllq xmm, imm8
 *
 * @param   bRm      The ModR/M byte (already fetched by the group dispatcher;
 *                   the register form is expected, see note below).
 * @param   pfnU128  The u128 shift implementation, invoked with (dst, imm8).
 */
FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, immediate.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        /// @todo Caller already enforced register mode?!
    }
    return VINF_SUCCESS;
}
4945
4946
/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib
 * MMX logical right shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
}
4953
4954
/** Opcode 0x66 0x0f 0x71 11/2 - psrlw Ux, Ib
 * SSE2 logical right shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
}
4961
4962
/** Opcode 0x0f 0x71 11/4 - psraw Nq, Ib
 * MMX arithmetic right shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
}
4969
4970
/** Opcode 0x66 0x0f 0x71 11/4 - psraw Ux, Ib
 * SSE2 arithmetic right shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
}
4977
4978
/** Opcode 0x0f 0x71 11/6 - psllw Nq, Ib
 * MMX logical left shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
}
4985
4986
/** Opcode 0x66 0x0f 0x71 11/6 - psllw Ux, Ib
 * SSE2 logical left shift of words by immediate. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
}
4993
4994
/**
 * Group 12 jump table for register variant.
 *
 * Eight rows (one per ModR/M reg field /0../7), four columns each, indexed by
 * pVCpu->iem.s.idxPrefix (operand-prefix variant; presumably none/0x66/0xf3/0xf2
 * in that order — matches the Nq (MMX) and Ux (SSE2) column placement).
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp12_psrlw_Nq_Ib,   iemOp_Grp12_psrlw_Ux_Ib,    iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp12_psraw_Nq_Ib,   iemOp_Grp12_psraw_Ux_Ib,    iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp12_psllw_Nq_Ib,   iemOp_Grp12_psllw_Ux_Ib,    iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
5010
5011
/** Opcode 0x0f 0x71.
 * Group 12 dispatcher: register form indexes the jump table by
 * reg-field * 4 + prefix index; memory form is invalid (still needs to
 * consume the imm8, hence InvalidWithRMNeedImm8). */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
5022
5023
/** Opcode 0x0f 0x72 11/2 - psrld Nq, Ib
 * MMX logical right shift of dwords by immediate. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}
5030
5031
/** Opcode 0x66 0x0f 0x72 11/2 - psrld Ux, Ib
 * SSE2 logical right shift of dwords by immediate. */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
}
5038
5039
/** Opcode 0x0f 0x72 11/4 - psrad Nq, Ib
 * MMX arithmetic right shift of dwords by immediate. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}
5046
5047
/** Opcode 0x66 0x0f 0x72 11/4 - psrad Ux, Ib
 * SSE2 arithmetic right shift of dwords by immediate. */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
}
5054
5055
/** Opcode 0x0f 0x72 11/6 - pslld Nq, Ib
 * MMX logical left shift of dwords by immediate. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}
5062
/** Opcode 0x66 0x0f 0x72 11/6 - pslld Ux, Ib
 * SSE2 logical left shift of dwords by immediate. */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
}
5069
5070
/**
 * Group 13 jump table for register variant.
 *
 * Eight rows (ModR/M reg field /0../7) by four prefix-index columns,
 * mirroring the layout of g_apfnGroup12RegReg.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp13_psrld_Nq_Ib,   iemOp_Grp13_psrld_Ux_Ib,    iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp13_psrad_Nq_Ib,   iemOp_Grp13_psrad_Ux_Ib,    iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp13_pslld_Nq_Ib,   iemOp_Grp13_pslld_Ux_Ib,    iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
5086
/** Opcode 0x0f 0x72.
 * Group 13 dispatcher: register form indexes the jump table by
 * reg-field * 4 + prefix index; memory form is invalid but must still
 * consume the imm8. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
5097
5098
/** Opcode 0x0f 0x73 11/2 - psrlq Nq, Ib
 * MMX logical right shift of the qword by immediate. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}
5105
5106
/** Opcode 0x66 0x0f 0x73 11/2 - psrlq Ux, Ib
 * SSE2 logical right shift of qwords by immediate. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
}
5113
5114
/** Opcode 0x66 0x0f 0x73 11/3 - psrldq Ux, Ib
 * SSE2 byte-wise right shift of the whole 128-bit register (no MMX form). */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
}
5121
5122
/** Opcode 0x0f 0x73 11/6 - psllq Nq, Ib
 * MMX logical left shift of the qword by immediate. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}
5129
5130
/** Opcode 0x66 0x0f 0x73 11/6 - psllq Ux, Ib
 * SSE2 logical left shift of qwords by immediate. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
}
5137
5138
/** Opcode 0x66 0x0f 0x73 11/7 - pslldq Ux, Ib
 * SSE2 byte-wise left shift of the whole 128-bit register (no MMX form). */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
}
5145
/**
 * Group 14 jump table for register variant.
 *
 * Eight rows (ModR/M reg field /0../7) by four prefix-index columns; note
 * /3 (psrldq) and /7 (pslldq) only exist with the 0x66 prefix column.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp14_psrlq_Nq_Ib,     iemOp_Grp14_psrlq_Ux_Ib,   iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib,  iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp14_psllq_Nq_Ib,     iemOp_Grp14_psllq_Ux_Ib,   iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib,  iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
5161
5162
/** Opcode 0x0f 0x73.
 * Group 14 dispatcher: register form indexes the jump table by
 * reg-field * 4 + prefix index; memory form is invalid but must still
 * consume the imm8. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
5173
5174
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq
 * MMX packed compare-equal on bytes. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}
5181
5182
/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx
 * SSE2 packed compare-equal on bytes. */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}
5189
5190
5191/* Opcode 0xf3 0x0f 0x74 - invalid */
5192/* Opcode 0xf2 0x0f 0x74 - invalid */
5193
5194
/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq
 * MMX packed compare-equal on words. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}
5201
5202
/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx
 * SSE2 packed compare-equal on words. */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
}
5209
5210
5211/* Opcode 0xf3 0x0f 0x75 - invalid */
5212/* Opcode 0xf2 0x0f 0x75 - invalid */
5213
5214
/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq
 * MMX packed compare-equal on dwords. */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
}
5221
5222
5223/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    /* SSE2 packed dword compare-for-equal; defers to the common SSE2 worker. */
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
}
5229
5230
5231/* Opcode 0xf3 0x0f 0x76 - invalid */
5232/* Opcode 0xf2 0x0f 0x76 - invalid */
5233
5234
5235/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_DEF(iemOp_emms)
{
    /* emms: take the FPU back out of MMX mode.  No operands; may raise \#NM
       (device not available) or a pending FPU exception first. */
    IEMOP_MNEMONIC(emms, "emms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();    /* the whole point of EMMS */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5250
5251/* Opcode 0x66 0x0f 0x77 - invalid */
5252/* Opcode 0xf3 0x0f 0x77 - invalid */
5253/* Opcode 0xf2 0x0f 0x77 - invalid */
5254
5255/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
5256#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    /* VMREAD Ey,Gy: read a VMCS field.  Gy (ModRM reg) supplies the field
       encoding, Ey (ModRM r/m: register or memory) receives the value.
       Requires being in VMX operation (checked by the helper macros below). */
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    /* Effective operand size is derived from the CPU mode only (64-bit in
       long mode, 32-bit otherwise); the operand-size prefix is not consulted. */
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t,   u64Enc,  1);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t,   u32Enc,  1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc,   2);
            /* Effective address must be computed before the done-decoding check. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc,   2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5326#else
5327FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
5328#endif
5329
5330/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
5331FNIEMOP_STUB(iemOp_AmdGrp17);
5332/* Opcode 0xf3 0x0f 0x78 - invalid */
5333/* Opcode 0xf2 0x0f 0x78 - invalid */
5334
5335/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
5336#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    /* VMWRITE Gy,Ey: write a VMCS field.  Gy (ModRM reg) supplies the field
       encoding, Ey (ModRM r/m: register or memory) supplies the value.
       Requires being in VMX operation (checked by the helper macros below). */
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    /* Effective operand size is derived from the CPU mode only (64-bit in
       long mode, 32-bit otherwise); the operand-size prefix is not consulted. */
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc,   2);
            /* Effective address must be computed before the done-decoding check. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t,  iEffSeg,  0);
            IEM_MC_ARG(RTGCPTR,  GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc,   2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5406#else
5407FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
5408#endif
5409/* Opcode 0x66 0x0f 0x79 - invalid */
5410/* Opcode 0xf3 0x0f 0x79 - invalid */
5411/* Opcode 0xf2 0x0f 0x79 - invalid */
5412
5413/* Opcode 0x0f 0x7a - invalid */
5414/* Opcode 0x66 0x0f 0x7a - invalid */
5415/* Opcode 0xf3 0x0f 0x7a - invalid */
5416/* Opcode 0xf2 0x0f 0x7a - invalid */
5417
5418/* Opcode 0x0f 0x7b - invalid */
5419/* Opcode 0x66 0x0f 0x7b - invalid */
5420/* Opcode 0xf3 0x0f 0x7b - invalid */
5421/* Opcode 0xf2 0x0f 0x7b - invalid */
5422
5423/* Opcode 0x0f 0x7c - invalid */
5424/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
5425FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
5426/* Opcode 0xf3 0x0f 0x7c - invalid */
5427/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
5428FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
5429
5430/* Opcode 0x0f 0x7d - invalid */
5431/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
5432FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
5433/* Opcode 0xf3 0x0f 0x7d - invalid */
5434/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
5435FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
5436
5437
5438/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    /* 0x0f 0x7e, no SIMD prefix: store an MMX register to a GPR or memory.
       REX.W selects between movq (full 64 bits) and movd (low 32 bits). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX (greg64 <- full MMX register) */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();   /* accessing an MMX register puts the FPU into MMX mode */

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX (64-bit store to memory) */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX (greg32 <- low 32 bits of MMX register) */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX (32-bit store to memory) */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;

}
5547
5548
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    /* 0x66 0x0f 0x7e: store the low part of an XMM register to a GPR or
       memory.  REX.W selects between movq (64 bits) and movd (32 bits). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM (greg64 <- low qword of XMM register) */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM (64-bit store to memory) */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM (greg32 <- low dword of XMM register) */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM (32-bit store to memory) */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;

}
5653
5654/**
5655 * @opcode 0x7e
5656 * @oppfx 0xf3
5657 * @opcpuid sse2
5658 * @opgroup og_sse2_pcksclr_datamove
5659 * @opxcpttype none
5660 * @optest op1=1 op2=2 -> op1=2
5661 * @optest op1=0 op2=-42 -> op1=-42
5662 */
FNIEMOP_DEF(iemOp_movq_Vq_Wq)
{
    /* 0xf3 0x0f 0x7e: movq - load the low qword of Wq (XMM reg or mem64)
       into the destination XMM register, zero-extending to 128 bits. */
    IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.  (XMM <- low qword of XMM, high half zeroed.)
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.  (XMM <- mem64, high half zeroed.)
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,  uSrc);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5707
5708/* Opcode 0xf2 0x0f 0x7e - invalid */
5709
5710
5711/** Opcode 0x0f 0x7f - movq Qq, Pq */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    /* 0x0f 0x7f: movq - store MMX register Pq to Qq (MMX register or mem64). */
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.  (MMX <- MMX.)
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
        IEM_MC_FPU_TO_MMX_MODE();   /* accessing an MMX register puts the FPU into MMX mode */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, Register.  (mem64 <- MMX.)
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5757
5758/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    /* 0x66 0x0f 0x7f: movdqa - store 128-bit XMM register to Wx (XMM reg or
       mem128); the memory form enforces 16-byte alignment. */
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.  (XMM <- XMM.)
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.  (mem128 <- XMM, aligned store.)
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5799
5800/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    /* 0xf3 0x0f 0x7f: movdqu - store 128-bit XMM register to Wx (XMM reg or
       mem128); unlike movdqa the memory form has no alignment requirement. */
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.  (XMM <- XMM.)
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.  (mem128 <- XMM, unaligned store.)
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5841
5842/* Opcode 0xf2 0x0f 0x7f - invalid */
5843
5844
5845
5846/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    /* 0x0f 0x80: jo Jv - near jump if OF is set.  386+ opcode. */
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5880
5881
5882/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    /* 0x0f 0x81: jno Jv - near jump if OF is clear.  386+ opcode. */
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5916
5917
5918/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    /* 0x0f 0x82: jc/jb/jnae Jv - near jump if CF is set.  386+ opcode. */
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5952
5953
5954/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    /* 0x0f 0x83: jnc/jnb/jae Jv - near jump if CF is clear.  386+ opcode. */
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5988
5989
5990/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    /* 0x0f 0x84: je/jz Jv - near jump if ZF is set.  386+ opcode. */
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6024
6025
6026/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    /* 0x0f 0x85: jne/jnz Jv - near jump if ZF is clear.  386+ opcode. */
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6060
6061
6062/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    /* 0x0f 0x86: jbe/jna Jv - near jump if CF or ZF is set.  386+ opcode. */
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6096
6097
6098/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    /* 0x0f 0x87: jnbe/ja Jv - near jump if both CF and ZF are clear.  386+ opcode. */
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6132
6133
6134/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    /* 0x0f 0x88: js Jv - near jump if SF is set.  386+ opcode. */
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6168
6169
6170/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    /* 0x0f 0x89: jns Jv - near jump if SF is clear.  386+ opcode. */
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6204
6205
6206/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    /* 0x0f 0x8a: jp Jv - near jump if PF is set.  386+ opcode. */
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6240
6241
6242/** Opcode 0x0f 0x8b. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    /* 0x0f 0x8b: jnp Jv - near jump if PF is clear.  386+ opcode. */
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6276
6277
6278/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    /* 0x0f 0x8c: jl/jnge Jv - near jump if SF != OF (signed less).  386+ opcode. */
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6312
6313
6314/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    /* 0x0f 0x8d: jnl/jge Jv - near jump if SF == OF (signed greater-or-equal).  386+ opcode. */
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6348
6349
6350/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    /* 0x0f 0x8e: jle/jng Jv - near jump if ZF is set or SF != OF (signed less-or-equal).  386+ opcode. */
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* operand size defaults to 64-bit in long mode */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6384
6385
/**
 * Opcode 0x0f 0x8f - jnle/jg Jv.
 *
 * Near conditional jump, taken when ZF == 0 and SF == OF (signed greater).
 * Implemented as the negation of jle: the IF (ZF or SF != OF) branch is the
 * fall-through.  386+; operand size defaults to 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();           /* jle condition: NOT taken */
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);     /* greater: take the jump */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6420
6421
/**
 * Opcode 0x0f 0x90 - seto Eb.
 *
 * Stores 1 in the byte r/m operand if OF is set, otherwise 0.  386+.
 */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6462
6463
/**
 * Opcode 0x0f 0x91 - setno Eb.
 *
 * Stores 1 in the byte r/m operand if OF is clear, otherwise 0 (same flag test
 * as seto with the stored constants swapped).  386+.
 */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6504
6505
/**
 * Opcode 0x0f 0x92 - setc/setb/setnae Eb.
 *
 * Stores 1 in the byte r/m operand if CF is set, otherwise 0.  386+.
 */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6546
6547
/**
 * Opcode 0x0f 0x93 - setnc/setnb/setae Eb.
 *
 * Stores 1 in the byte r/m operand if CF is clear, otherwise 0.  386+.
 */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6588
6589
/**
 * Opcode 0x0f 0x94 - sete/setz Eb.
 *
 * Stores 1 in the byte r/m operand if ZF is set, otherwise 0.  386+.
 */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6630
6631
/**
 * Opcode 0x0f 0x95 - setne/setnz Eb.
 *
 * Stores 1 in the byte r/m operand if ZF is clear, otherwise 0.  386+.
 */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6672
6673
/**
 * Opcode 0x0f 0x96 - setbe/setna Eb.
 *
 * Stores 1 in the byte r/m operand if CF or ZF is set (unsigned
 * below-or-equal), otherwise 0.  386+.
 */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6714
6715
/**
 * Opcode 0x0f 0x97 - setnbe/seta Eb.
 *
 * Stores 1 in the byte r/m operand if both CF and ZF are clear (unsigned
 * above), otherwise 0.  386+.
 */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6756
6757
/**
 * Opcode 0x0f 0x98 - sets Eb.
 *
 * Stores 1 in the byte r/m operand if SF is set, otherwise 0.  386+.
 */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6798
6799
/**
 * Opcode 0x0f 0x99 - setns Eb.
 *
 * Stores 1 in the byte r/m operand if SF is clear, otherwise 0.  386+.
 */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6840
6841
/**
 * Opcode 0x0f 0x9a - setp/setpe Eb.
 *
 * Stores 1 in the byte r/m operand if PF is set, otherwise 0.  386+.
 */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6882
6883
/**
 * Opcode 0x0f 0x9b - setnp/setpo Eb.
 *
 * Stores 1 in the byte r/m operand if PF is clear, otherwise 0.  386+.
 */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6924
6925
/**
 * Opcode 0x0f 0x9c - setl/setnge Eb.
 *
 * Stores 1 in the byte r/m operand if SF != OF (signed less), otherwise 0.
 * 386+.
 */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6966
6967
/**
 * Opcode 0x0f 0x9d - setnl/setge Eb.
 *
 * Stores 1 in the byte r/m operand if SF == OF (signed greater-or-equal),
 * otherwise 0.  386+.
 */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7008
7009
/**
 * Opcode 0x0f 0x9e - setle/setng Eb.
 *
 * Stores 1 in the byte r/m operand if ZF is set or SF != OF (signed
 * less-or-equal), otherwise 0.  386+.
 */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7050
7051
/**
 * Opcode 0x0f 0x9f - setnle/setg Eb.
 *
 * Stores 1 in the byte r/m operand if ZF is clear and SF == OF (signed
 * greater), otherwise 0 (setle with the stored constants swapped).  386+.
 */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7092
7093
7094/**
7095 * Common 'push segment-register' helper.
7096 */
7097FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
7098{
7099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7100 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
7101 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7102
7103 switch (pVCpu->iem.s.enmEffOpSize)
7104 {
7105 case IEMMODE_16BIT:
7106 IEM_MC_BEGIN(0, 1);
7107 IEM_MC_LOCAL(uint16_t, u16Value);
7108 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
7109 IEM_MC_PUSH_U16(u16Value);
7110 IEM_MC_ADVANCE_RIP();
7111 IEM_MC_END();
7112 break;
7113
7114 case IEMMODE_32BIT:
7115 IEM_MC_BEGIN(0, 1);
7116 IEM_MC_LOCAL(uint32_t, u32Value);
7117 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
7118 IEM_MC_PUSH_U32_SREG(u32Value);
7119 IEM_MC_ADVANCE_RIP();
7120 IEM_MC_END();
7121 break;
7122
7123 case IEMMODE_64BIT:
7124 IEM_MC_BEGIN(0, 1);
7125 IEM_MC_LOCAL(uint64_t, u64Value);
7126 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
7127 IEM_MC_PUSH_U64(u64Value);
7128 IEM_MC_ADVANCE_RIP();
7129 IEM_MC_END();
7130 break;
7131 }
7132
7133 return VINF_SUCCESS;
7134}
7135
7136
/**
 * Opcode 0x0f 0xa0 - push fs.
 *
 * 386+.  Valid in 64-bit mode (unlike the one-byte segment pushes).
 */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    /* NOTE(review): iemOpCommonPushSReg repeats this check at its top; the
       duplication is harmless but redundant. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
7145
7146
/**
 * Opcode 0x0f 0xa1 - pop fs.
 *
 * 386+.  Deferred to a C implementation since popping a segment register can
 * load descriptors and fault.
 */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
7155
7156
/**
 * Opcode 0x0f 0xa2 - cpuid.
 *
 * Deferred entirely to the C implementation.
 */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
7165
7166
/**
 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 *
 * For a register destination the bit index is simply masked to the operand
 * width.  For a memory destination the (signed) bit index is first split:
 * the high bits displace the effective address by whole operand-sized units
 * (so negative indices address memory below the base), and the low bits
 * select the bit within that unit.  BT is distinguished by having no locked
 * worker (pfnLockedU16 == NULL) and therefore only reads memory; the other
 * three map it read-write and honour the LOCK prefix.
 */
FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint32_t *,              pEFlags,                2);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf); /* bit index mod 16 */
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint32_t *,              pEFlags,                2);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); /* bit index mod 32 */
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint32_t *,              pEFlags,                2);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); /* bit index mod 64 */
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT has no locked worker and only reads; BTC/BTR/BTS write. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
                IEM_MC_LOCAL(int16_t,               i16AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* Split the signed bit index: index/16 words of displacement,
                   index%16 selects the bit. */
                IEM_MC_ASSIGN(i16AddrAdj, u16Src);
                IEM_MC_AND_ARG_U16(u16Src, 0x0f);
                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
                IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); /* words -> bytes */
                IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,               i32AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* index/32 dwords of displacement, index%32 selects the bit. */
                IEM_MC_ASSIGN(i32AddrAdj, u32Src);
                IEM_MC_AND_ARG_U32(u32Src, 0x1f);
                IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
                IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); /* dwords -> bytes */
                IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
                IEM_MC_LOCAL(int64_t,               i64AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* index/64 qwords of displacement, index%64 selects the bit. */
                IEM_MC_ASSIGN(i64AddrAdj, u64Src);
                IEM_MC_AND_ARG_U64(u64Src, 0x3f);
                IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
                IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); /* qwords -> bytes */
                IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7350
7351
/**
 * Opcode 0x0f 0xa3 - bt Ev,Gv.
 *
 * Bit test (read-only; g_iemAImpl_bt has no locked worker, so the common
 * worker maps memory read-only and rejects LOCK).  386+.
 */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt  Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}
7359
7360
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Decodes the ModR/M byte plus the trailing Ib shift-count immediate and
 * dispatches to the operand-size specific implementation in @a pImpl.
 * AF and OF are declared undefined for the output-verification harness.
 *
 * @param   pImpl   Table with the 16/32/64-bit shld/shrd worker functions.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: the immediate byte follows the ModR/M byte directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit GPR writes clear the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: note the '1' passed to IEM_MC_CALC_RM_EFF_ADDR,
           telling the effective-address calculation that one immediate byte
           still follows the addressing bytes. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7505
7506
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Same as iemOpCommonShldShrd_Ib except the shift count is taken from the CL
 * register at runtime instead of an immediate byte.  AF and OF are declared
 * undefined for the output-verification harness.
 *
 * @param   pImpl   Table with the 16/32/64-bit shld/shrd worker functions.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit GPR writes clear the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination; no immediate follows, hence cbImm = 0. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7650
7651
7652
/** Opcode 0x0f 0xa4 - shld Ev,Gv,Ib.
 * Double-precision shift left with immediate count; the actual work is done by
 * the common Ib worker using the CPU-behaviour-selected shld implementation. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
7660
7661
/** Opcode 0x0f 0xa5 - shld Ev,Gv,CL.
 * Double-precision shift left with the count taken from CL. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
7669
7670
/** Opcode 0x0f 0xa8 - push gs.
 * Pushes the GS segment selector via the common segment-register push worker. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386(); /* GS only exists on 386+. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
7679
7680
/** Opcode 0x0f 0xa9 - pop gs.
 * Pops GS; segment loading is complex enough that this defers to the C
 * implementation, passing the current effective operand size along. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386(); /* GS only exists on 386+. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
7689
7690
/** Opcode 0x0f 0xaa - rsm.
 * Resume from System Management Mode; fully deferred to the C implementation. */
FNIEMOP_DEF(iemOp_rsm)
{
    IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
    IEMOP_HLP_MIN_386(); /* 386SL and later. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
}
7699
7700
7701
/** Opcode 0x0f 0xab - bts Ev,Gv.
 * Bit test-and-set: CF gets the old bit value, the bit is set; delegates to
 * the common bit-op worker with the 'bts' implementation table. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 386. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
7709
7710
/** Opcode 0x0f 0xac - shrd Ev,Gv,Ib.
 * Double-precision shift right with immediate count. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
7718
7719
/** Opcode 0x0f 0xad - shrd Ev,Gv,CL.
 * Double-precision shift right with the count taken from CL. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
7727
7728
/** Opcode 0x0f 0xae mem/0 - fxsave m512.
 * Saves the x87/MMX/SSE state to a 512-byte memory area.  Raises \#UD when the
 * guest CPU profile lacks FXSAVE/FXRSTOR support; the heavy lifting is in the
 * C implementation. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* FXSAVE only reads the guest FPU state, so actualizing for read suffices. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7748
7749
/** Opcode 0x0f 0xae mem/1 - fxrstor m512.
 * Restores the x87/MMX/SSE state from a 512-byte memory area.  Raises \#UD
 * when the guest CPU profile lacks FXSAVE/FXRSTOR support. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* FXRSTOR overwrites the guest FPU state, so actualize it for change. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7769
7770
/**
 * @opmaps      grp15
 * @opcode      !11/2
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_mxcsrsm
 * @opxcpttype  5
 * @optest      op1=0      -> mxcsr=0
 * @optest      op1=0x2083 -> mxcsr=0x2083
 * @optest      op1=0xfffffffe -> value.xcpt=0xd
 * @optest      op1=0x2083 cr0|=ts -> value.xcpt=0x7
 * @optest      op1=0x2083 cr0|=em -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=mp -> mxcsr=0x2083
 * @optest      op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): LDMXCSR loads (writes) MXCSR, yet the SSE state is only
       actualized FOR_READ here, whereas the state-loading fxrstor above uses
       the FOR_CHANGE variant - verify this is intentional. */
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7807
7808
/**
 * @opmaps      grp15
 * @opcode      !11/3
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_mxcsrsm
 * @opxcpttype  5
 * @optest      mxcsr=0      -> op1=0
 * @optest      mxcsr=0x2083 -> op1=0x2083
 * @optest      mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
 * @optest      mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
 * @optest      mxcsr=0x2086 cr0|=mp -> op1=0x2086
 * @optest      mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
 * @optest      mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest      mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* STMXCSR only reads MXCSR, so actualizing the SSE state for read suffices. */
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7844
7845
/**
 * @opmaps      grp15
 * @opcode      !11/4
 * @oppfx       none
 * @opcpuid     xsave
 * @opgroup     og_system
 * @opxcpttype  none
 *
 * Saves processor extended state to memory; raises \#UD when the guest CPU
 * profile lacks XSAVE/XRSTOR support.
 */
FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* XSAVE only reads the guest state, so actualizing for read suffices. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7872
7873
7874/**
7875 * @opmaps grp15
7876 * @opcode !11/5
7877 * @oppfx none
7878 * @opcpuid xsave
7879 * @opgroup og_system
7880 * @opxcpttype none
7881 */
7882FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
7883{
7884 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
7885 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7886 return IEMOP_RAISE_INVALID_OPCODE();
7887
7888 IEM_MC_BEGIN(3, 0);
7889 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7890 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7891 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7894 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7895 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7896 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7897 IEM_MC_END();
7898 return VINF_SUCCESS;
7899}
7900
/** Opcode 0x0f 0xae mem/6 - xsaveopt: not implemented, decode stub raises \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
7903
/**
 * @opmaps      grp15
 * @opcode      !11/7
 * @oppfx       none
 * @opcpuid     clfsh
 * @opgroup     og_cachectl
 * @optest      op1=1 ->
 *
 * Flushes the cache line containing the memory operand.  Without the CLFLUSH
 * feature the whole ModR/M byte sequence is treated as an invalid encoding.
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* clflush and clflushopt share the same C implementation. */
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7928
/**
 * @opmaps      grp15
 * @opcode      !11/7
 * @oppfx       0x66
 * @opcpuid     clflushopt
 * @opgroup     og_cachectl
 * @optest      op1=1 ->
 *
 * Optimized (weakly ordered) cache-line flush; same C implementation as
 * clflush, selected by the 0x66 prefix and gated on the CLFLUSHOPT feature.
 */
FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7953
7954
/** Opcode 0x0f 0xae 11b/5 - lfence.
 * Load fence.  Requires guest SSE2; on non-ARM64 hosts without SSE2 an
 * alternative memory-fence helper is used instead of the real instruction. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm); /* ModR/M rm bits are don't-care for the fence encodings. */
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7977
7978
/** Opcode 0x0f 0xae 11b/6 - mfence.
 * Full memory fence.  Requires guest SSE2; on non-ARM64 hosts without SSE2 an
 * alternative memory-fence helper is used instead of the real instruction. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm); /* ModR/M rm bits are don't-care for the fence encodings. */
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8001
8002
/** Opcode 0x0f 0xae 11b/7 - sfence.
 * Store fence.  Requires guest SSE2; on non-ARM64 hosts without SSE2 an
 * alternative memory-fence helper is used instead of the real instruction. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm); /* ModR/M rm bits are don't-care for the fence encodings. */
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8025
8026
/** Opcode 0xf3 0x0f 0xae 11b/0 - rdfsbase Ry.
 * Reads the FS segment base into a general register; 64-bit or 32-bit result
 * depending on the effective operand size.  The FSGSBASE availability checks
 * are done by IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT. */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: only the low 32 bits of the base are read. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8054
8055
/** Opcode 0xf3 0x0f 0xae 11b/1 - rdgsbase Ry.
 * Reads the GS segment base into a general register; mirrors rdfsbase. */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: only the low 32 bits of the base are read. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8083
8084
/** Opcode 0xf3 0x0f 0xae 11b/2 - wrfsbase Ry.
 * Writes a general register into the FS segment base.  A 64-bit source must
 * be canonical (\#GP(0) otherwise); a 32-bit source is zero-extended and is
 * always canonical. */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst); /* base must be canonical */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst); /* zero-extended, always canonical */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8113
8114
/** Opcode 0xf3 0x0f 0xae 11b/3 - wrgsbase Ry.
 * Writes a general register into the GS segment base; mirrors wrfsbase. */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst); /* base must be canonical */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst); /* zero-extended, always canonical */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8143
8144
/**
 * Group 15 jump table for the register (ModR/M mod=11b) variant.
 *
 * Indexed by reg-field * 4 + prefix index, where the prefix columns are:
 * none, 0x66, 0xf3, 0xf2.  Only the 0xf3 (rd/wr fs/gs base) and unprefixed
 * (fence) encodings are valid.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdfsbase,           iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_rdgsbase,           iemOp_InvalidWithRM,
    /* /2 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrfsbase,           iemOp_InvalidWithRM,
    /* /3 */ iemOp_InvalidWithRM,           iemOp_InvalidWithRM,            iemOp_Grp15_wrgsbase,           iemOp_InvalidWithRM,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
    /* /5 */ iemOp_Grp15_lfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_mfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_sfence,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
8160
8161
/**
 * Group 15 jump table for the memory (ModR/M mod!=11b) variant.
 *
 * Indexed by reg-field * 4 + prefix index, where the prefix columns are:
 * none, 0x66, 0xf3, 0xf2.  Only the unprefixed encodings (and 0x66
 * clflushopt) are valid.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
{   /* pfx:  none,                          066h,                           0f3h,                           0f2h */
    /* /0 */ iemOp_Grp15_fxsave,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /1 */ iemOp_Grp15_fxrstor,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /2 */ iemOp_Grp15_ldmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /3 */ iemOp_Grp15_stmxcsr,           iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /4 */ iemOp_Grp15_xsave,             iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /5 */ iemOp_Grp15_xrstor,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /6 */ iemOp_Grp15_xsaveopt,          iemOp_InvalidWithRM,            iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
    /* /7 */ iemOp_Grp15_clflush,           iemOp_Grp15_clflushopt,         iemOp_InvalidWithRM,            iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
8177
8178
/** Opcode 0x0f 0xae - group 15 dispatcher.
 * Selects the register or memory jump table from the ModR/M mode, then indexes
 * it by reg-field * 4 + mandatory-prefix index. */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                             + pVCpu->iem.s.idxPrefix], bRm);
}
8192
8193
/** Opcode 0x0f 0xaf - imul Gv,Ev.
 * Two-operand signed multiply; SF/ZF/AF/PF are declared undefined for the
 * verification harness, and the common rv,rm binary-operator helper does the
 * decoding and dispatch. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 386. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
}
8202
8203
/** Opcode 0x0f 0xb0 - cmpxchg Eb,Gb.
 * Compare-and-exchange byte: compares AL with Eb; on match stores Gb into Eb,
 * otherwise loads Eb into AL.  The locked assembly worker is selected when a
 * LOCK prefix is present. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486(); /* Instruction introduced with the 486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination.
           NOTE(review): IEMOP_HLP_DONE_DECODING() permits a LOCK prefix here
           and routes it to the locked worker - verify against the intended
           \#UD behaviour for LOCK with register operands. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX); /* AL is both compare value and fallback dest */
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: mapped read-write since cmpxchg always writes
           the destination; AL is shadowed in a local and committed afterwards. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* write back possibly-updated AL */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8262
8263/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: rAX, Ev and Gv are all registers; the
           assembly worker compares/exchanges and updates EFLAGS. */
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* Clear the high halves of both possibly-written 32-bit GPRs
                   unconditionally; we cannot tell here which one the worker
                   actually wrote. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* 32-bit hosts pass the 64-bit source by reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map Ev read/write, run the worker on the
           mapping, then commit the mapping, EFLAGS and the (possibly
           updated) accumulator.  The locked worker is used when a LOCK
           prefix is present. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                /* The worker gets a pointer to a local AX copy; written back below. */
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* STORE_GREG_U32 zeroes the high half of RAX in 64-bit mode. */
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* 32-bit hosts pass the 64-bit source by reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8456
8457
/**
 * Common worker for LDS/LES-style far pointer loads (lss/lfs/lgs Gv,Mp).
 *
 * Reads the offset part first and the 16-bit selector from the bytes
 * following it (disp 2/4/8 depending on operand size), then defers the
 * actual segment register + GPR update to iemCImpl_load_SReg_Greg.
 * Memory operand only; the caller has already rejected register mode.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
    uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8519
8520
8521/** Opcode 0x0f 0xb2. */
8522FNIEMOP_DEF(iemOp_lss_Gv_Mp)
8523{
8524 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
8525 IEMOP_HLP_MIN_386();
8526 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8527 if (IEM_IS_MODRM_REG_MODE(bRm))
8528 return IEMOP_RAISE_INVALID_OPCODE();
8529 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
8530}
8531
8532
8533/** Opcode 0x0f 0xb3. */
/* BTR Ev,Gv: bit test and reset, dispatched through the common bit-op worker. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
8540
8541
8542/** Opcode 0x0f 0xb4. */
8543FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
8544{
8545 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
8546 IEMOP_HLP_MIN_386();
8547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8548 if (IEM_IS_MODRM_REG_MODE(bRm))
8549 return IEMOP_RAISE_INVALID_OPCODE();
8550 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
8551}
8552
8553
8554/** Opcode 0x0f 0xb5. */
8555FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
8556{
8557 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
8558 IEMOP_HLP_MIN_386();
8559 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8560 if (IEM_IS_MODRM_REG_MODE(bRm))
8561 return IEMOP_RAISE_INVALID_OPCODE();
8562 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
8563}
8564
8565
8566/** Opcode 0x0f 0xb6. */
/* MOVZX Gv,Eb: zero-extend a byte register/memory operand into Gv. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8659
8660
8661/** Opcode 0x0f 0xb7. */
/* MOVZX Gv,Ew: zero-extend a word operand into a 32/64-bit register.
   A 16-bit effective operand size collapses to the 32-bit path here. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
8729
8730
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
/* Itanium-only opcode; stubbed out to raise #UD on x86. */
FNIEMOP_UD_STUB(iemOp_jmpe);
8733
8734
8735/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
/* POPCNT Gv,Ev: guarded by CPUID.POPCNT; uses the native assembly
   implementation when the host supports it, a C fallback otherwise. */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    static const IEMOPBINSIZES s_Native =
    {   NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
# endif
    static const IEMOPBINSIZES s_Fallback =
    {   NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
#endif
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
}
8751
8752
8753/**
8754 * @opcode 0xb9
8755 * @opinvalid intel-modrm
8756 * @optest ->
8757 */
FNIEMOP_DEF(iemOp_Grp10)
{
    /*
     * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
     * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
     */
    Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
    IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
    return FNIEMOP_CALL(iemOp_InvalidNeedRM);
}
8768
8769
8770/** Opcode 0x0f 0xba. */
/* Group 8: BT/BTS/BTR/BTC Ev,Ib (reg field selects the operation;
   reg 0-3 are invalid but still consume the full modr/m + imm8). */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* Immediate bit index is taken modulo the operand width. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads; the writing variants (with a locked form) map R/W. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: an imm8 still follows the modr/m bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8941
8942
8943/** Opcode 0x0f 0xbb. */
/* BTC Ev,Gv: bit test and complement, dispatched through the common bit-op worker. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
8950
8951
8952/**
8953 * Common worker for BSF and BSR instructions.
8954 *
8955 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
8956 * the destination register, which means that for 32-bit operations the high
8957 * bits must be left alone.
8958 *
8959 * @param pImpl Pointer to the instruction implementation (assembly).
8960 */
8961FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
8962{
8963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8964
8965 /*
8966 * If rm is denoting a register, no more instruction bytes.
8967 */
8968 if (IEM_IS_MODRM_REG_MODE(bRm))
8969 {
8970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8971 switch (pVCpu->iem.s.enmEffOpSize)
8972 {
8973 case IEMMODE_16BIT:
8974 IEM_MC_BEGIN(3, 0);
8975 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8976 IEM_MC_ARG(uint16_t, u16Src, 1);
8977 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8978
8979 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8980 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8981 IEM_MC_REF_EFLAGS(pEFlags);
8982 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8983
8984 IEM_MC_ADVANCE_RIP();
8985 IEM_MC_END();
8986 break;
8987
8988 case IEMMODE_32BIT:
8989 IEM_MC_BEGIN(3, 0);
8990 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8991 IEM_MC_ARG(uint32_t, u32Src, 1);
8992 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8993
8994 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8995 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8996 IEM_MC_REF_EFLAGS(pEFlags);
8997 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8998 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8999 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9000 IEM_MC_ENDIF();
9001 IEM_MC_ADVANCE_RIP();
9002 IEM_MC_END();
9003 break;
9004
9005 case IEMMODE_64BIT:
9006 IEM_MC_BEGIN(3, 0);
9007 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9008 IEM_MC_ARG(uint64_t, u64Src, 1);
9009 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9010
9011 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
9012 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9013 IEM_MC_REF_EFLAGS(pEFlags);
9014 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9015
9016 IEM_MC_ADVANCE_RIP();
9017 IEM_MC_END();
9018 break;
9019 }
9020 }
9021 else
9022 {
9023 /*
9024 * We're accessing memory.
9025 */
9026 switch (pVCpu->iem.s.enmEffOpSize)
9027 {
9028 case IEMMODE_16BIT:
9029 IEM_MC_BEGIN(3, 1);
9030 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9031 IEM_MC_ARG(uint16_t, u16Src, 1);
9032 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9033 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9034
9035 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9037 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9038 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9039 IEM_MC_REF_EFLAGS(pEFlags);
9040 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9041
9042 IEM_MC_ADVANCE_RIP();
9043 IEM_MC_END();
9044 break;
9045
9046 case IEMMODE_32BIT:
9047 IEM_MC_BEGIN(3, 1);
9048 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9049 IEM_MC_ARG(uint32_t, u32Src, 1);
9050 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9052
9053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9055 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9056 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9057 IEM_MC_REF_EFLAGS(pEFlags);
9058 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9059
9060 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9061 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9062 IEM_MC_ENDIF();
9063 IEM_MC_ADVANCE_RIP();
9064 IEM_MC_END();
9065 break;
9066
9067 case IEMMODE_64BIT:
9068 IEM_MC_BEGIN(3, 1);
9069 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9070 IEM_MC_ARG(uint64_t, u64Src, 1);
9071 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9072 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9073
9074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9076 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9077 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9078 IEM_MC_REF_EFLAGS(pEFlags);
9079 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9080
9081 IEM_MC_ADVANCE_RIP();
9082 IEM_MC_END();
9083 break;
9084 }
9085 }
9086 return VINF_SUCCESS;
9087}
9088
9089
9090/** Opcode 0x0f 0xbc. */
/* BSF Gv,Ev: bit scan forward; EFLAGS behavior differs per vendor, hence
   the target-CPU behavior table selection. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
}
9098
9099
9100/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
/* TZCNT Gv,Ev: without BMI1 the F3 prefix is ignored and this decodes as
   plain BSF; otherwise dispatch through the vendor-specific EFLAGS tables. */
FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
    IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    static const IEMOPBINSIZES s_iemAImpl_tzcnt =
    {   NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
    {   NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
    {   NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
    /* [host-has-bmi1][target-cpu-behavior] implementation selection table. */
    static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
        { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}
9124
9125
9126/** Opcode 0x0f 0xbd. */
/* BSR Gv,Ev: bit scan reverse; EFLAGS behavior differs per vendor, hence
   the target-CPU behavior table selection. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
}
9134
9135
9136/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
/* LZCNT Gv,Ev: without BMI1/ABM the F3 prefix is ignored and this decodes
   as plain BSR; otherwise dispatch through the vendor-specific EFLAGS tables. */
FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
    IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    static const IEMOPBINSIZES s_iemAImpl_lzcnt =
    {   NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
    {   NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
    {   NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
    /* [host-has-bmi1][target-cpu-behavior] implementation selection table. */
    static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
        { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}
9160
9161
9162
9163/** Opcode 0x0f 0xbe. */
/* MOVSX Gv,Eb: sign-extend a byte register/memory operand into Gv. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9256
9257
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* Sign-extend a 16-bit register or memory operand into the 32/64-bit
       destination register selected by the ModRM reg field. */
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     * assuming that it will be ignored. Would be nice to have a few
     * tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            /* 16-bit and 32-bit operand sizes both take this path and store a
               32-bit result (see the @todo above regarding the 0x66 prefix). */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         * Note: effective address is calculated before the done-decoding call,
         *       as further opcode bytes (SIB/disp) may still need fetching.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
9326
9327
9328/** Opcode 0x0f 0xc0. */
9329FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
9330{
9331 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9332 IEMOP_HLP_MIN_486();
9333 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
9334
9335 /*
9336 * If rm is denoting a register, no more instruction bytes.
9337 */
9338 if (IEM_IS_MODRM_REG_MODE(bRm))
9339 {
9340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9341
9342 IEM_MC_BEGIN(3, 0);
9343 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9344 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
9345 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9346
9347 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9348 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9349 IEM_MC_REF_EFLAGS(pEFlags);
9350 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
9351
9352 IEM_MC_ADVANCE_RIP();
9353 IEM_MC_END();
9354 }
9355 else
9356 {
9357 /*
9358 * We're accessing memory.
9359 */
9360 IEM_MC_BEGIN(3, 3);
9361 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9362 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
9363 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9364 IEM_MC_LOCAL(uint8_t, u8RegCopy);
9365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9366
9367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9368 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9369 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9370 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
9371 IEM_MC_FETCH_EFLAGS(EFlags);
9372 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9373 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
9374 else
9375 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
9376
9377 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9378 IEM_MC_COMMIT_EFLAGS(EFlags);
9379 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
9380 IEM_MC_ADVANCE_RIP();
9381 IEM_MC_END();
9382 return VINF_SUCCESS;
9383 }
9384 return VINF_SUCCESS;
9385}
9386
9387
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* Exchange-and-add, word/dword/qword forms: dst += src, src = old dst.
       One case per effective operand size; the memory forms honour LOCK. */
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit GPR writes zero the high halves of both registers in
                   64-bit mode, hence the explicit clearing via the refs. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  The worker updates a local copy of the
         * register operand; the real register is stored only after the memory
         * write and EFLAGS have been committed.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9540
9541
9542/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
9543FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
9544/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
9545FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
9546/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
9547FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
9548/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
9549FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
9550
9551
/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    /* Non-temporal store of a 32/64-bit GPR to memory.  The non-temporal
       hint is not modelled here; it is emulated as a plain store. */
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* movnti requires SSE2; raise #UD without it. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
9605/* Opcode 0x66 0x0f 0xc3 - invalid */
9606/* Opcode 0xf3 0x0f 0xc3 - invalid */
9607/* Opcode 0xf2 0x0f 0xc3 - invalid */
9608
9609/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
9610FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
9611/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
9612FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
9613/* Opcode 0xf3 0x0f 0xc4 - invalid */
9614/* Opcode 0xf2 0x0f 0xc4 - invalid */
9615
9616/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
9617FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
9618/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
9619FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
9620/* Opcode 0xf3 0x0f 0xc5 - invalid */
9621/* Opcode 0xf2 0x0f 0xc5 - invalid */
9622
9623/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
9624FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
9625/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
9626FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
9627/* Opcode 0xf3 0x0f 0xc6 - invalid */
9628/* Opcode 0xf2 0x0f 0xc6 - invalid */
9629
9630
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* Compare-and-exchange 8 bytes: compares EDX:EAX with m64, and on match
       stores ECX:EBX; otherwise loads m64 into EDX:EAX.  Honours LOCK. */
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Gather the comparand (EDX:EAX) and the replacement (ECX:EBX) pairs. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) the worker left the memory value in the local
       pair; write it back to EDX:EAX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9675
9676
/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    /* Compare-and-exchange 16 bytes: compares RDX:RAX with m128, on match
       stores RCX:RBX; otherwise loads m128 into RDX:RAX.  Requires the
       CMPXCHG16B CPUID feature and a 16-byte aligned operand (#GP). */
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* Gather the comparand (RDX:RAX) and the replacement (RCX:RBX) pairs. */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
#  if defined(RT_ARCH_AMD64)
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
#  endif
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
#  if defined(RT_ARCH_AMD64)
        else
#  endif
# endif
# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
                     accesses and not all atomic, which works fine in a UNI CPU guest
                     configuration (ignoring DMA). If guest SMP is active we have no choice
                     but to use a rendezvous callback here. Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }
# endif

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* On mismatch (ZF clear), write the memory value back to RDX:RAX. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
9756
9757FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
9758{
9759 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
9760 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
9761 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
9762}
9763
9764/** Opcode 0x0f 0xc7 11/6. */
9765FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
9766
/** Opcode 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    /* Loads the current-VMCS pointer from memory; the heavy lifting is done
       by the C implementation (iemCImpl_vmptrld). */
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Size/REPZ/REPNZ prefixes select other group-9 encodings, hence #UD. */
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
#endif
9787
/** Opcode 0x66 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    /* Clears the VMCS referenced by the memory operand; the heavy lifting is
       done by the C implementation (iemCImpl_vmclear). */
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
#endif
9808
/** Opcode 0xf3 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    /* Enters VMX operation using the VMXON region given by the memory
       operand; the heavy lifting is done by iemCImpl_vmxon.  No
       in-VMX-operation check here, unlike the other VMX group-9 forms. */
    IEMOP_MNEMONIC(vmxon, "vmxon");
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
#endif
9828
/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    /* Stores the current-VMCS pointer to memory; the heavy lifting is done
       by the C implementation (iemCImpl_vmptrst). */
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
#endif
9849
9850/** Opcode 0x0f 0xc7 11/7. */
9851FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
9852
9853
9854/**
9855 * Group 9 jump table for register variant.
9856 */
9857IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
9858{ /* pfx: none, 066h, 0f3h, 0f2h */
9859 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9860 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
9861 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9862 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9863 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9864 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9865 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9866 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9867};
9868AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
9869
9870
9871/**
9872 * Group 9 jump table for memory variant.
9873 */
9874IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
9875{ /* pfx: none, 066h, 0f3h, 0f2h */
9876 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9877 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
9878 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9879 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9880 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9881 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9882 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
9883 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9884};
9885AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
9886
9887
9888/** Opcode 0x0f 0xc7. */
9889FNIEMOP_DEF(iemOp_Grp9)
9890{
9891 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
9892 if (IEM_IS_MODRM_REG_MODE(bRm))
9893 /* register, register */
9894 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9895 + pVCpu->iem.s.idxPrefix], bRm);
9896 /* memory, register */
9897 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9898 + pVCpu->iem.s.idxPrefix], bRm);
9899}
9900
9901
9902/**
9903 * Common 'bswap register' helper.
9904 */
9905FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
9906{
9907 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9908 switch (pVCpu->iem.s.enmEffOpSize)
9909 {
9910 case IEMMODE_16BIT:
9911 IEM_MC_BEGIN(1, 0);
9912 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9913 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
9914 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
9915 IEM_MC_ADVANCE_RIP();
9916 IEM_MC_END();
9917 return VINF_SUCCESS;
9918
9919 case IEMMODE_32BIT:
9920 IEM_MC_BEGIN(1, 0);
9921 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9922 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9923 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9924 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
9925 IEM_MC_ADVANCE_RIP();
9926 IEM_MC_END();
9927 return VINF_SUCCESS;
9928
9929 case IEMMODE_64BIT:
9930 IEM_MC_BEGIN(1, 0);
9931 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9932 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9933 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
9934 IEM_MC_ADVANCE_RIP();
9935 IEM_MC_END();
9936 return VINF_SUCCESS;
9937
9938 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9939 }
9940}
9941
9942
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    /* Byte-swap rAX (or r8 with REX.B); delegates to the common helper. */
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
9953
9954
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* Byte-swap rCX (or r9 with REX.B); delegates to the common helper. */
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
9962
9963
9964/** Opcode 0x0f 0xca. */
9965FNIEMOP_DEF(iemOp_bswap_rDX_r10)
9966{
9967 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
9968 IEMOP_HLP_MIN_486();
9969 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
9970}
9971
9972
9973/** Opcode 0x0f 0xcb. */
9974FNIEMOP_DEF(iemOp_bswap_rBX_r11)
9975{
9976 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
9977 IEMOP_HLP_MIN_486();
9978 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
9979}
9980
9981
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* Byte-swap rSP (or r12 with REX.B); delegates to the common helper. */
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
9989
9990
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* Byte-swap rBP (or r13 with REX.B); delegates to the common helper. */
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
9998
9999
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* Byte-swap rSI (or r14 with REX.B); delegates to the common helper. */
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
10007
10008
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* Byte-swap rDI (or r15 with REX.B); delegates to the common helper. */
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
10016
10017
10018/* Opcode 0x0f 0xd0 - invalid */
10019/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
10020FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
10021/* Opcode 0xf3 0x0f 0xd0 - invalid */
10022/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
10023FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
10024
10025/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
10026FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
10027{
10028 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10029 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
10030}
10031
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
{
    /* SSE2 packed-word logical right shift; defers to the common optimized
       SSE2 full/full worker with the u128 helper. */
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
}
10038
10039/* Opcode 0xf3 0x0f 0xd1 - invalid */
10040/* Opcode 0xf2 0x0f 0xd1 - invalid */
10041
/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
{
    /* MMX packed-dword logical right shift; defers to the common optimized
       MMX full/full worker with the u64 helper. */
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
}
10048
10049
/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
{
    /* SSE2 packed-dword logical right shift; defers to the common optimized
       SSE2 full/full worker with the u128 helper. */
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
}
10056
10057
10058/* Opcode 0xf3 0x0f 0xd2 - invalid */
10059/* Opcode 0xf2 0x0f 0xd2 - invalid */
10060
/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
{
    /* MMX packed-qword logical right shift; defers to the common optimized
       MMX full/full worker with the u64 helper. */
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
}
10067
10068
/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
{
    /* SSE2 packed-qword logical right shift; defers to the common optimized
       SSE2 full/full worker with the u128 helper. */
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
}
10075
10076
10077/* Opcode 0xf3 0x0f 0xd3 - invalid */
10078/* Opcode 0xf2 0x0f 0xd3 - invalid */
10079
10080
/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
{
    /* MMX qword add.  Uses the _Ex worker since the MMX form of paddq needs
       SSE2 to be present (it was introduced with SSE2). */
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}
10087
10088
/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
{
    /* SSE2 packed-qword add; defers to the common SSE2 full/full worker. */
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
}
10095
10096
10097/* Opcode 0xf3 0x0f 0xd4 - invalid */
10098/* Opcode 0xf2 0x0f 0xd4 - invalid */
10099
/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
{
    /* MMX packed-word multiply (low result); common MMX full/full worker. */
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
}
10106
/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
{
    /* SSE2 packed-word multiply (low result); common SSE2 full/full worker. */
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
}
10113
10114
10115/* Opcode 0xf3 0x0f 0xd5 - invalid */
10116/* Opcode 0xf2 0x0f 0xd5 - invalid */
10117
10118/* Opcode 0x0f 0xd6 - invalid */
10119
10120/**
10121 * @opcode 0xd6
10122 * @oppfx 0x66
10123 * @opcpuid sse2
10124 * @opgroup og_sse2_pcksclr_datamove
10125 * @opxcpttype none
10126 * @optest op1=-1 op2=2 -> op1=2
10127 * @optest op1=0 op2=-42 -> op1=-42
10128 */
10129FNIEMOP_DEF(iemOp_movq_Wq_Vq)
10130{
10131 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10132 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10133 if (IEM_IS_MODRM_REG_MODE(bRm))
10134 {
10135 /*
10136 * Register, register.
10137 */
10138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10139 IEM_MC_BEGIN(0, 2);
10140 IEM_MC_LOCAL(uint64_t, uSrc);
10141
10142 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10143 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
10144
10145 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10146 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
10147
10148 IEM_MC_ADVANCE_RIP();
10149 IEM_MC_END();
10150 }
10151 else
10152 {
10153 /*
10154 * Memory, register.
10155 */
10156 IEM_MC_BEGIN(0, 2);
10157 IEM_MC_LOCAL(uint64_t, uSrc);
10158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10159
10160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10162 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10163 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10164
10165 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10166 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10167
10168 IEM_MC_ADVANCE_RIP();
10169 IEM_MC_END();
10170 }
10171 return VINF_SUCCESS;
10172}
10173
10174
10175/**
10176 * @opcode 0xd6
10177 * @opcodesub 11 mr/reg
10178 * @oppfx f3
10179 * @opcpuid sse2
10180 * @opgroup og_sse2_simdint_datamove
10181 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
10182 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10183 */
10184FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
10185{
10186 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10187 if (IEM_IS_MODRM_REG_MODE(bRm))
10188 {
10189 /*
10190 * Register, register.
10191 */
10192 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10194 IEM_MC_BEGIN(0, 1);
10195 IEM_MC_LOCAL(uint64_t, uSrc);
10196
10197 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10198 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10199
10200 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
10201 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
10202 IEM_MC_FPU_TO_MMX_MODE();
10203
10204 IEM_MC_ADVANCE_RIP();
10205 IEM_MC_END();
10206 return VINF_SUCCESS;
10207 }
10208
10209 /**
10210 * @opdone
10211 * @opmnemonic udf30fd6mem
10212 * @opcode 0xd6
10213 * @opcodesub !11 mr/reg
10214 * @oppfx f3
10215 * @opunused intel-modrm
10216 * @opcpuid sse
10217 * @optest ->
10218 */
10219 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
10220}
10221
10222
10223/**
10224 * @opcode 0xd6
10225 * @opcodesub 11 mr/reg
10226 * @oppfx f2
10227 * @opcpuid sse2
10228 * @opgroup og_sse2_simdint_datamove
10229 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
10230 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10231 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
10232 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
10233 * @optest op1=-42 op2=0xfedcba9876543210
10234 * -> op1=0xfedcba9876543210 ftw=0xff
10235 */
10236FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
10237{
10238 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10239 if (IEM_IS_MODRM_REG_MODE(bRm))
10240 {
10241 /*
10242 * Register, register.
10243 */
10244 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10246 IEM_MC_BEGIN(0, 1);
10247 IEM_MC_LOCAL(uint64_t, uSrc);
10248
10249 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10250 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10251
10252 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
10253 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
10254 IEM_MC_FPU_TO_MMX_MODE();
10255
10256 IEM_MC_ADVANCE_RIP();
10257 IEM_MC_END();
10258 return VINF_SUCCESS;
10259 }
10260
10261 /**
10262 * @opdone
10263 * @opmnemonic udf20fd6mem
10264 * @opcode 0xd6
10265 * @opcodesub !11 mr/reg
10266 * @oppfx f2
10267 * @opunused intel-modrm
10268 * @opcpuid sse
10269 * @optest ->
10270 */
10271 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
10272}
10273
10274
/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        /* Gather the MSB of each source byte into the low bits of the GREG. */
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
10300
10301
/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Gather the MSB of each of the 16 source bytes into the GREG. */
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
10326
10327
10328/* Opcode 0xf3 0x0f 0xd7 - invalid */
10329/* Opcode 0xf2 0x0f 0xd7 - invalid */
10330
10331
/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
{
    /* MMX: subtract packed unsigned bytes with unsigned saturation. */
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
}


/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
{
    /* SSE2: subtract packed unsigned bytes with unsigned saturation (128-bit). */
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
}


/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
{
    /* MMX: subtract packed unsigned words with unsigned saturation. */
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
}


/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
{
    /* SSE2: subtract packed unsigned words with unsigned saturation (128-bit). */
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
}


/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
{
    /* Packed unsigned byte minimum.  The MMX encoding arrived with
       SSE/MMXEXT, hence the combined MMX+SSE worker rather than the
       plain MMX one. */
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
}


/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
{
    /* SSE2: packed unsigned byte minimum (128-bit). */
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
}

/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */
10387
/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    /* MMX: bitwise AND of the full 64-bit registers. */
    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
}


/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
FNIEMOP_DEF(iemOp_pand_Vx_Wx)
{
    /* SSE2: bitwise AND of the full 128-bit registers. */
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}


/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
{
    /* MMX: add packed unsigned bytes with unsigned saturation. */
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
}


/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
{
    /* SSE2: add packed unsigned bytes with unsigned saturation (128-bit). */
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
}


/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
{
    /* MMX: add packed unsigned words with unsigned saturation. */
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
}


/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
{
    /* SSE2: add packed unsigned words with unsigned saturation (128-bit). */
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
}


/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */
10444
/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
{
    /* Packed unsigned byte maximum.  The MMX encoding arrived with
       SSE/MMXEXT, hence the combined MMX+SSE worker. */
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
}


/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
{
    /* SSE2: packed unsigned byte maximum (128-bit). */
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
}

/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */


/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
{
    /* MMX: bitwise AND-NOT (dst = ~dst & src). */
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
}


/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
{
    /* SSE2: bitwise AND-NOT (128-bit). */
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}


/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */
10482
/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
{
    /* Packed unsigned byte average with rounding; MMX form is SSE/MMXEXT-introduced. */
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
}


/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
{
    /* SSE2: packed unsigned byte average with rounding (128-bit). */
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
}


/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
{
    /* MMX: arithmetic right shift of packed words by the count in Qq. */
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
}


/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
{
    /* SSE2: arithmetic right shift of packed words (128-bit). */
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
}


/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
{
    /* MMX: arithmetic right shift of packed dwords by the count in Qq. */
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
}


/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
{
    /* SSE2: arithmetic right shift of packed dwords (128-bit). */
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
}


/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */
10539
/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
{
    /* Packed unsigned word average with rounding; MMX form is SSE/MMXEXT-introduced. */
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
}


/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
{
    /* SSE2: packed unsigned word average with rounding (128-bit). */
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
}


/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
{
    /* High word of unsigned word multiply; MMX form is SSE/MMXEXT-introduced. */
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
}


/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
{
    /* SSE2: high word of unsigned word multiply (128-bit). */
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
}


/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
{
    /* MMX: high word of signed word multiply. */
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
}


/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
{
    /* SSE2: high word of signed word multiply (128-bit). */
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
}


/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd (not yet implemented) */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd (not yet implemented) */
FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd (not yet implemented) */
FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
10604
10605
10606/**
10607 * @opcode 0xe7
10608 * @opcodesub !11 mr/reg
10609 * @oppfx none
10610 * @opcpuid sse
10611 * @opgroup og_sse1_cachect
10612 * @opxcpttype none
10613 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
10614 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10615 */
10616FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
10617{
10618 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10620 if (IEM_IS_MODRM_MEM_MODE(bRm))
10621 {
10622 /* Register, memory. */
10623 IEM_MC_BEGIN(0, 2);
10624 IEM_MC_LOCAL(uint64_t, uSrc);
10625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10626
10627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10629 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
10630 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10631
10632 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
10633 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10634 IEM_MC_FPU_TO_MMX_MODE();
10635
10636 IEM_MC_ADVANCE_RIP();
10637 IEM_MC_END();
10638 return VINF_SUCCESS;
10639 }
10640 /**
10641 * @opdone
10642 * @opmnemonic ud0fe7reg
10643 * @opcode 0xe7
10644 * @opcodesub 11 mr/reg
10645 * @oppfx none
10646 * @opunused immediate
10647 * @opcpuid sse
10648 * @optest ->
10649 */
10650 return IEMOP_RAISE_INVALID_OPCODE();
10651}
10652
10653/**
10654 * @opcode 0xe7
10655 * @opcodesub !11 mr/reg
10656 * @oppfx 0x66
10657 * @opcpuid sse2
10658 * @opgroup og_sse2_cachect
10659 * @opxcpttype 1
10660 * @optest op1=-1 op2=2 -> op1=2
10661 * @optest op1=0 op2=-42 -> op1=-42
10662 */
10663FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
10664{
10665 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10667 if (IEM_IS_MODRM_MEM_MODE(bRm))
10668 {
10669 /* Register, memory. */
10670 IEM_MC_BEGIN(0, 2);
10671 IEM_MC_LOCAL(RTUINT128U, uSrc);
10672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10673
10674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10676 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10677 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10678
10679 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10680 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10681
10682 IEM_MC_ADVANCE_RIP();
10683 IEM_MC_END();
10684 return VINF_SUCCESS;
10685 }
10686
10687 /**
10688 * @opdone
10689 * @opmnemonic ud660fe7reg
10690 * @opcode 0xe7
10691 * @opcodesub 11 mr/reg
10692 * @oppfx 0x66
10693 * @opunused immediate
10694 * @opcpuid sse
10695 * @optest ->
10696 */
10697 return IEMOP_RAISE_INVALID_OPCODE();
10698}
10699
10700/* Opcode 0xf3 0x0f 0xe7 - invalid */
10701/* Opcode 0xf2 0x0f 0xe7 - invalid */
10702
10703
/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    /* MMX: subtract packed signed bytes with signed saturation. */
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
}


/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    /* SSE2: subtract packed signed bytes with signed saturation (128-bit). */
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
}


/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    /* MMX: subtract packed signed words with signed saturation. */
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    /* SSE2: subtract packed signed words with signed saturation (128-bit). */
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
}


/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */


/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    /* Packed signed word minimum; MMX form is SSE/MMXEXT-introduced,
       hence the combined MMX+SSE worker. */
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    /* SSE2: packed signed word minimum (128-bit). */
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
}


/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */


/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    /* MMX: bitwise OR of the full 64-bit registers. */
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    /* SSE2: bitwise OR of the full 128-bit registers. */
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */
10781
/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    /* MMX: add packed signed bytes with signed saturation. */
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    /* SSE2: add packed signed bytes with signed saturation (128-bit). */
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    /* MMX: add packed signed words with signed saturation. */
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    /* SSE2: add packed signed words with signed saturation (128-bit). */
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
}


/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */


/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    /* Packed signed word maximum; MMX form is SSE/MMXEXT-introduced,
       hence the combined MMX+SSE worker. */
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    /* SSE2: packed signed word maximum (128-bit). */
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}


/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    /* MMX: bitwise XOR of the full 64-bit registers. */
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    /* SSE2: bitwise XOR of the full 128-bit registers. */
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}


/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */
10859
/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/* Opcode 0xf3 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
10863FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
10864
10865
/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    /* MMX: left shift of packed words by the count in Qq. */
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    /* SSE2: left shift of packed words (128-bit). */
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/* Opcode 0xf3 0x0f 0xf1 - invalid */
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    /* MMX: left shift of packed dwords by the count in Qq. */
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    /* SSE2: left shift of packed dwords (128-bit). */
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/* Opcode 0xf3 0x0f 0xf2 - invalid */
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    /* MMX: left shift of packed qwords by the count in Qq. */
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    /* SSE2: left shift of packed qwords (128-bit). */
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}

/* Opcode 0xf3 0x0f 0xf3 - invalid */
/* Opcode 0xf2 0x0f 0xf3 - invalid */
10918
10919/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
10920FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
10921{
10922 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10923 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
10924}
10925
10926
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    /* SSE2: multiply the low unsigned dword of each qword lane, producing 64-bit products. */
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
}
10933
10934
10935/* Opcode 0xf2 0x0f 0xf4 - invalid */
10936
/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    /* MMX: multiply packed signed words and add adjacent dword products. */
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    /* SSE2: multiply packed signed words and add adjacent dword products (128-bit). */
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}

/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    /* Sum of absolute byte differences; MMX form is SSE/MMXEXT-introduced,
       hence the combined MMX+SSE worker. */
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    /* SSE2: sum of absolute byte differences (128-bit). */
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}


/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq (not yet implemented) */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq (not yet implemented) */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */
10977
10978
/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    /* MMX: subtract packed bytes (wrap-around). */
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    /* SSE2: subtract packed bytes (128-bit, wrap-around). */
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    /* MMX: subtract packed words (wrap-around). */
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    /* SSE2: subtract packed words (128-bit, wrap-around). */
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}


/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    /* MMX: subtract packed dwords (wrap-around). */
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    /* SSE2: subtract packed dwords (128-bit, wrap-around). */
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    /* The MMX encoding of psubq was introduced with SSE2, so the worker is
       gated on fSse2 rather than plain MMX. */
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    /* SSE2: subtract packed qwords (128-bit, wrap-around). */
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */
11053
11054
/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    /* MMX: add packed bytes (wrap-around). */
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    /* SSE2: add packed bytes (128-bit, wrap-around). */
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    /* MMX: add packed words (wrap-around). */
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    /* SSE2: add packed words (128-bit, wrap-around). */
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    /* MMX: add packed dwords (wrap-around). */
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    /* SSE2: add packed dwords (128-bit, wrap-around). */
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */
11110
11111
/** Opcode **** 0x0f 0xff - UD0 (guaranteed-invalid opcode, raises \#UD). */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        /* On Intel CPUs UD0 consumes a ModRM byte (and any effective address
           bytes) before raising \#UD, so fetch and decode it here.  On other
           vendors (the else path) no ModRM byte is consumed. */
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* Decode the effective address purely to advance the instruction
               pointer over displacement/SIB bytes; the address is unused. */
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    /* Always ends in an invalid-opcode exception regardless of vendor. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
11132
11133
11134
/**
 * Two byte opcode map, first byte 0x0f.
 *
 * Each opcode 0x00..0xff has four consecutive entries, one per mandatory
 * prefix in effect: none, 0x66, 0xf3 and 0xf2 (in that order).  IEMOP_X4
 * expands to four identical entries for opcodes that ignore the prefix,
 * hence the AssertCompile for 256 * 4 = 1024 entries below.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*          no prefix,                  066h prefix                 f3h prefix,                 f2h prefix */
    /* 0x00 */  IEMOP_X4(iemOp_Grp6),
    /* 0x01 */  IEMOP_X4(iemOp_Grp7),
    /* 0x02 */  IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */  IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */  IEMOP_X4(iemOp_Invalid),
    /* 0x05 */  IEMOP_X4(iemOp_syscall),
    /* 0x06 */  IEMOP_X4(iemOp_clts),
    /* 0x07 */  IEMOP_X4(iemOp_sysret),
    /* 0x08 */  IEMOP_X4(iemOp_invd),
    /* 0x09 */  IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */  IEMOP_X4(iemOp_Invalid),
    /* 0x0b */  IEMOP_X4(iemOp_ud2),
    /* 0x0c */  IEMOP_X4(iemOp_Invalid),
    /* 0x0d */  IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */  IEMOP_X4(iemOp_femms),
    /* 0x0f */  IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */  iemOp_movups_Vps_Wps,       iemOp_movupd_Vpd_Wpd,       iemOp_movss_Vss_Wss,        iemOp_movsd_Vsd_Wsd,
    /* 0x11 */  iemOp_movups_Wps_Vps,       iemOp_movupd_Wpd_Vpd,       iemOp_movss_Wss_Vss,        iemOp_movsd_Wsd_Vsd,
    /* 0x12 */  iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq,        iemOp_movsldup_Vdq_Wdq,     iemOp_movddup_Vdq_Wdq,
    /* 0x13 */  iemOp_movlps_Mq_Vq,         iemOp_movlpd_Mq_Vq,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x14 */  iemOp_unpcklps_Vx_Wx,       iemOp_unpcklpd_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x15 */  iemOp_unpckhps_Vx_Wx,       iemOp_unpckhpd_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x16 */  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq,   iemOp_InvalidNeedRM,
    /* 0x17 */  iemOp_movhps_Mq_Vq,         iemOp_movhpd_Mq_Vq,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x18 */  IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */  IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */  iemOp_mov_Rd_Cd,            iemOp_mov_Rd_Cd,            iemOp_mov_Rd_Cd,            iemOp_mov_Rd_Cd,
    /* 0x21 */  iemOp_mov_Rd_Dd,            iemOp_mov_Rd_Dd,            iemOp_mov_Rd_Dd,            iemOp_mov_Rd_Dd,
    /* 0x22 */  iemOp_mov_Cd_Rd,            iemOp_mov_Cd_Rd,            iemOp_mov_Cd_Rd,            iemOp_mov_Cd_Rd,
    /* 0x23 */  iemOp_mov_Dd_Rd,            iemOp_mov_Dd_Rd,            iemOp_mov_Dd_Rd,            iemOp_mov_Dd_Rd,
    /* 0x24 */  iemOp_mov_Rd_Td,            iemOp_mov_Rd_Td,            iemOp_mov_Rd_Td,            iemOp_mov_Rd_Td,
    /* 0x25 */  iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,
    /* 0x26 */  iemOp_mov_Td_Rd,            iemOp_mov_Td_Rd,            iemOp_mov_Td_Rd,            iemOp_mov_Td_Rd,
    /* 0x27 */  iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,
    /* 0x28 */  iemOp_movaps_Vps_Wps,       iemOp_movapd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x29 */  iemOp_movaps_Wps_Vps,       iemOp_movapd_Wpd_Vpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2a */  iemOp_cvtpi2ps_Vps_Qpi,     iemOp_cvtpi2pd_Vpd_Qpi,     iemOp_cvtsi2ss_Vss_Ey,      iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */  iemOp_movntps_Mps_Vps,      iemOp_movntpd_Mpd_Vpd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2c */  iemOp_cvttps2pi_Ppi_Wps,    iemOp_cvttpd2pi_Ppi_Wpd,    iemOp_cvttss2si_Gy_Wss,     iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */  iemOp_cvtps2pi_Ppi_Wps,     iemOp_cvtpd2pi_Qpi_Wpd,     iemOp_cvtss2si_Gy_Wss,      iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */  iemOp_ucomiss_Vss_Wss,      iemOp_ucomisd_Vsd_Wsd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2f */  iemOp_comiss_Vss_Wss,       iemOp_comisd_Vsd_Wsd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x30 */  IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */  IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */  IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */  IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */  IEMOP_X4(iemOp_sysenter),
    /* 0x35 */  IEMOP_X4(iemOp_sysexit),
    /* 0x36 */  IEMOP_X4(iemOp_Invalid),
    /* 0x37 */  IEMOP_X4(iemOp_getsec),
    /* 0x38 */  IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */  IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */  IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */  IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */  IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */  IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */  IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */  IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */  IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */  IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */  IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */  IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */  IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */  IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */  IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */  IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */  IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */  IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */  iemOp_movmskps_Gy_Ups,      iemOp_movmskpd_Gy_Upd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x51 */  iemOp_sqrtps_Vps_Wps,       iemOp_sqrtpd_Vpd_Wpd,       iemOp_sqrtss_Vss_Wss,       iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */  iemOp_rsqrtps_Vps_Wps,      iemOp_InvalidNeedRM,        iemOp_rsqrtss_Vss_Wss,      iemOp_InvalidNeedRM,
    /* 0x53 */  iemOp_rcpps_Vps_Wps,        iemOp_InvalidNeedRM,        iemOp_rcpss_Vss_Wss,        iemOp_InvalidNeedRM,
    /* 0x54 */  iemOp_andps_Vps_Wps,        iemOp_andpd_Vpd_Wpd,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x55 */  iemOp_andnps_Vps_Wps,       iemOp_andnpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x56 */  iemOp_orps_Vps_Wps,         iemOp_orpd_Vpd_Wpd,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x57 */  iemOp_xorps_Vps_Wps,        iemOp_xorpd_Vpd_Wpd,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x58 */  iemOp_addps_Vps_Wps,        iemOp_addpd_Vpd_Wpd,        iemOp_addss_Vss_Wss,        iemOp_addsd_Vsd_Wsd,
    /* 0x59 */  iemOp_mulps_Vps_Wps,        iemOp_mulpd_Vpd_Wpd,        iemOp_mulss_Vss_Wss,        iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */  iemOp_cvtps2pd_Vpd_Wps,     iemOp_cvtpd2ps_Vps_Wpd,     iemOp_cvtss2sd_Vsd_Wss,     iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */  iemOp_cvtdq2ps_Vps_Wdq,     iemOp_cvtps2dq_Vdq_Wps,     iemOp_cvttps2dq_Vdq_Wps,    iemOp_InvalidNeedRM,
    /* 0x5c */  iemOp_subps_Vps_Wps,        iemOp_subpd_Vpd_Wpd,        iemOp_subss_Vss_Wss,        iemOp_subsd_Vsd_Wsd,
    /* 0x5d */  iemOp_minps_Vps_Wps,        iemOp_minpd_Vpd_Wpd,        iemOp_minss_Vss_Wss,        iemOp_minsd_Vsd_Wsd,
    /* 0x5e */  iemOp_divps_Vps_Wps,        iemOp_divpd_Vpd_Wpd,        iemOp_divss_Vss_Wss,        iemOp_divsd_Vsd_Wsd,
    /* 0x5f */  iemOp_maxps_Vps_Wps,        iemOp_maxpd_Vpd_Wpd,        iemOp_maxss_Vss_Wss,        iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */  iemOp_punpcklbw_Pq_Qd,      iemOp_punpcklbw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x61 */  iemOp_punpcklwd_Pq_Qd,      iemOp_punpcklwd_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x62 */  iemOp_punpckldq_Pq_Qd,      iemOp_punpckldq_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x63 */  iemOp_packsswb_Pq_Qq,       iemOp_packsswb_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x64 */  iemOp_pcmpgtb_Pq_Qq,        iemOp_pcmpgtb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x65 */  iemOp_pcmpgtw_Pq_Qq,        iemOp_pcmpgtw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x66 */  iemOp_pcmpgtd_Pq_Qq,        iemOp_pcmpgtd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x67 */  iemOp_packuswb_Pq_Qq,       iemOp_packuswb_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x68 */  iemOp_punpckhbw_Pq_Qq,      iemOp_punpckhbw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x69 */  iemOp_punpckhwd_Pq_Qq,      iemOp_punpckhwd_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6a */  iemOp_punpckhdq_Pq_Qq,      iemOp_punpckhdq_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6b */  iemOp_packssdw_Pq_Qd,       iemOp_packssdw_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6c */  iemOp_InvalidNeedRM,        iemOp_punpcklqdq_Vx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6d */  iemOp_InvalidNeedRM,        iemOp_punpckhqdq_Vx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6e */  iemOp_movd_q_Pd_Ey,         iemOp_movd_q_Vy_Ey,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6f */  iemOp_movq_Pq_Qq,           iemOp_movdqa_Vdq_Wdq,       iemOp_movdqu_Vdq_Wdq,       iemOp_InvalidNeedRM,

    /* 0x70 */  iemOp_pshufw_Pq_Qq_Ib,      iemOp_pshufd_Vx_Wx_Ib,      iemOp_pshufhw_Vx_Wx_Ib,     iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */  IEMOP_X4(iemOp_Grp12),
    /* 0x72 */  IEMOP_X4(iemOp_Grp13),
    /* 0x73 */  IEMOP_X4(iemOp_Grp14),
    /* 0x74 */  iemOp_pcmpeqb_Pq_Qq,        iemOp_pcmpeqb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x75 */  iemOp_pcmpeqw_Pq_Qq,        iemOp_pcmpeqw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x76 */  iemOp_pcmpeqd_Pq_Qq,        iemOp_pcmpeqd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x77 */  iemOp_emms,                 iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x78 */  iemOp_vmread_Ey_Gy,         iemOp_AmdGrp17,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x79 */  iemOp_vmwrite_Gy_Ey,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7a */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7b */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7c */  iemOp_InvalidNeedRM,        iemOp_haddpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_haddps_Vps_Wps,
    /* 0x7d */  iemOp_InvalidNeedRM,        iemOp_hsubpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_hsubps_Vps_Wps,
    /* 0x7e */  iemOp_movd_q_Ey_Pd,         iemOp_movd_q_Ey_Vy,         iemOp_movq_Vq_Wq,           iemOp_InvalidNeedRM,
    /* 0x7f */  iemOp_movq_Qq_Pq,           iemOp_movdqa_Wx_Vx,         iemOp_movdqu_Wx_Vx,         iemOp_InvalidNeedRM,

    /* 0x80 */  IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */  IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */  IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */  IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */  IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */  IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */  IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */  IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */  IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */  IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */  IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */  IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */  IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */  IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */  IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */  IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */  IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */  IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */  IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */  IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */  IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */  IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */  IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */  IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */  IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */  IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */  IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */  IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */  IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */  IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */  IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */  IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */  IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */  IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */  IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */  IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */  IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */  IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */  IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */  IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */  IEMOP_X4(iemOp_rsm),
    /* 0xab */  IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */  IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */  IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */  IEMOP_X4(iemOp_Grp15),
    /* 0xaf */  IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */  IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */  IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */  IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */  IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */  IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */  IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */  IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */  IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */  iemOp_jmpe,                 iemOp_InvalidNeedRM,        iemOp_popcnt_Gv_Ev,         iemOp_InvalidNeedRM,
    /* 0xb9 */  IEMOP_X4(iemOp_Grp10),
    /* 0xba */  IEMOP_X4(iemOp_Grp8),
    /* 0xbb */  IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */  iemOp_bsf_Gv_Ev,            iemOp_bsf_Gv_Ev,            iemOp_tzcnt_Gv_Ev,          iemOp_bsf_Gv_Ev,
    /* 0xbd */  iemOp_bsr_Gv_Ev,            iemOp_bsr_Gv_Ev,            iemOp_lzcnt_Gv_Ev,          iemOp_bsr_Gv_Ev,
    /* 0xbe */  IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */  IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */  IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */  IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */  iemOp_cmpps_Vps_Wps_Ib,     iemOp_cmppd_Vpd_Wpd_Ib,     iemOp_cmpss_Vss_Wss_Ib,     iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */  iemOp_movnti_My_Gy,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xc4 */  iemOp_pinsrw_Pq_RyMw_Ib,    iemOp_pinsrw_Vdq_RyMw_Ib,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc5 */  iemOp_pextrw_Gd_Nq_Ib,      iemOp_pextrw_Gd_Udq_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc6 */  iemOp_shufps_Vps_Wps_Ib,    iemOp_shufpd_Vpd_Wpd_Ib,    iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc7 */  IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */  IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */  IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */  IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */  IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */  IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */  IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */  IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */  IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */  iemOp_InvalidNeedRM,        iemOp_addsubpd_Vpd_Wpd,     iemOp_InvalidNeedRM,        iemOp_addsubps_Vps_Wps,
    /* 0xd1 */  iemOp_psrlw_Pq_Qq,          iemOp_psrlw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd2 */  iemOp_psrld_Pq_Qq,          iemOp_psrld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd3 */  iemOp_psrlq_Pq_Qq,          iemOp_psrlq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd4 */  iemOp_paddq_Pq_Qq,          iemOp_paddq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd5 */  iemOp_pmullw_Pq_Qq,         iemOp_pmullw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd6 */  iemOp_InvalidNeedRM,        iemOp_movq_Wq_Vq,           iemOp_movq2dq_Vdq_Nq,       iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */  iemOp_pmovmskb_Gd_Nq,       iemOp_pmovmskb_Gd_Ux,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd8 */  iemOp_psubusb_Pq_Qq,        iemOp_psubusb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd9 */  iemOp_psubusw_Pq_Qq,        iemOp_psubusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xda */  iemOp_pminub_Pq_Qq,         iemOp_pminub_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdb */  iemOp_pand_Pq_Qq,           iemOp_pand_Vx_Wx,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdc */  iemOp_paddusb_Pq_Qq,        iemOp_paddusb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdd */  iemOp_paddusw_Pq_Qq,        iemOp_paddusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xde */  iemOp_pmaxub_Pq_Qq,         iemOp_pmaxub_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdf */  iemOp_pandn_Pq_Qq,          iemOp_pandn_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xe0 */  iemOp_pavgb_Pq_Qq,          iemOp_pavgb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe1 */  iemOp_psraw_Pq_Qq,          iemOp_psraw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe2 */  iemOp_psrad_Pq_Qq,          iemOp_psrad_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe3 */  iemOp_pavgw_Pq_Qq,          iemOp_pavgw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe4 */  iemOp_pmulhuw_Pq_Qq,        iemOp_pmulhuw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe5 */  iemOp_pmulhw_Pq_Qq,         iemOp_pmulhw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe6 */  iemOp_InvalidNeedRM,        iemOp_cvttpd2dq_Vx_Wpd,     iemOp_cvtdq2pd_Vx_Wpd,      iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */  iemOp_movntq_Mq_Pq,         iemOp_movntdq_Mdq_Vdq,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe8 */  iemOp_psubsb_Pq_Qq,         iemOp_psubsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe9 */  iemOp_psubsw_Pq_Qq,         iemOp_psubsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xea */  iemOp_pminsw_Pq_Qq,         iemOp_pminsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xeb */  iemOp_por_Pq_Qq,            iemOp_por_Vx_Wx,            iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xec */  iemOp_paddsb_Pq_Qq,         iemOp_paddsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xed */  iemOp_paddsw_Pq_Qq,         iemOp_paddsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xee */  iemOp_pmaxsw_Pq_Qq,         iemOp_pmaxsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xef */  iemOp_pxor_Pq_Qq,           iemOp_pxor_Vx_Wx,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xf0 */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_lddqu_Vx_Mx,
    /* 0xf1 */  iemOp_psllw_Pq_Qq,          iemOp_psllw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf2 */  iemOp_pslld_Pq_Qq,          iemOp_pslld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf3 */  iemOp_psllq_Pq_Qq,          iemOp_psllq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf4 */  iemOp_pmuludq_Pq_Qq,        iemOp_pmuludq_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf5 */  iemOp_pmaddwd_Pq_Qq,        iemOp_pmaddwd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf6 */  iemOp_psadbw_Pq_Qq,         iemOp_psadbw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf7 */  iemOp_maskmovq_Pq_Nq,       iemOp_maskmovdqu_Vdq_Udq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf8 */  iemOp_psubb_Pq_Qq,          iemOp_psubb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf9 */  iemOp_psubw_Pq_Qq,          iemOp_psubw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfa */  iemOp_psubd_Pq_Qq,          iemOp_psubd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfb */  iemOp_psubq_Pq_Qq,          iemOp_psubq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfc */  iemOp_paddb_Pq_Qq,          iemOp_paddb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfd */  iemOp_paddw_Pq_Qq,          iemOp_paddw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfe */  iemOp_paddd_Pq_Qq,          iemOp_paddd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xff */  IEMOP_X4(iemOp_ud0),
};
/* 256 opcodes x 4 prefix variants. */
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
11418
11419/** @} */
11420
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette