VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 96403

Last change on this file since 96403 was 96403, checked in by vboxsync, 2 years ago

VMM/IEM: Implement [v]shufps/[v]shufpd instructions, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 398.2 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96403 2022-08-22 16:01:16Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * The @a pfnU64 worker is also handed the FXSAVE state (contrast with
 * iemOpCommonMmxOpt_FullFull_To_Full which takes only the operands).
 *
 * @param   pfnU64  Instruction specific worker operating on two 64-bit MMX
 *                  values; the destination is read-modify-write.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* no more opcode bytes; LOCK prefix is invalid here */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();                   /* writing an MMX register puts the x87 unit in MMX mode */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* Decode the effective address first, then signal end-of-decoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
83
84
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @param   pfnU64  Instruction specific worker operating on two 64-bit MMX
 *                  values; the destination is read-modify-write.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);   /* plain call, no FXSAVE state passed */
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
143
144
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Same as iemOpCommonMmx_FullFull_To_Full except that the feature check also
 * accepts SSE or the AMD MMX extensions (see the _CHECK_SSE_OR_MMXEXT macro).
 *
 * @param   pfnU64  Instruction specific worker operating on two 64-bit MMX
 *                  values; the destination is read-modify-write.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
201
202
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function
 * takes no FXSAVE state, just the operands.
 *
 * @param   pfnU64  Instruction specific worker operating on two 64-bit MMX
 *                  values; the destination is read-modify-write.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
262
263
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
 *
 * @param   pfnU64      Instruction specific worker operating on two 64-bit
 *                      MMX values; the destination is read-modify-write.
 * @param   fSupported  Whether the host/guest feature combination supports
 *                      the instruction; passed to the _XCPT_EX check so \#UD
 *                      is raised when false.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
320
321
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @param   pfnU128     Instruction specific worker; gets the FXSAVE state in
 *                      addition to the two 128-bit operands (contrast with
 *                      iemOpCommonSse2Opt_FullFull_To_Full).
 *
 * @sa iemOpCommonSse41_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U,     pDst, 0);
        IEM_MC_ARG(PCRTUINT128U,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,                 pDst,       0);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Alignment-checked fetch: misaligned mem128 operands fault (exception type 4). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
376
377
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands.
 *
 * @param   pfnU128     Instruction specific worker operating on two 128-bit
 *                      operands; the destination is read-modify-write.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U,     pDst, 0);
        IEM_MC_ARG(PCRTUINT128U,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,                 pDst,       0);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
435
436
/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access (zero extended to 64 bits on fetch).
 *
 * @param   pfnU64  Instruction specific worker operating on two 64-bit MMX
 *                  values; the destination is read-modify-write.
 *
 * NOTE(review): the parameter uses the function type FNIEMAIMPLMEDIAOPTF2U64
 * where sibling workers use the pointer type PFNIEMAIMPLMEDIAOPTF2U64; as a
 * parameter it decays to a pointer so it is equivalent, but confirm the
 * inconsistency is intentional.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          puDst, 0);
        IEM_MC_ARG(uint64_t const *,    puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  puDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* Only 32 bits are read from memory; zero-extend to the full 64-bit operand. */
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
493
494
/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
 *
 * Exceptions type 4.
 *
 * @param   pfnU128     Instruction specific worker operating on two 128-bit
 *                      operands; the destination is read-modify-write.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U,     puDst, 0);
        IEM_MC_ARG(PCRTUINT128U,    puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,                 puDst,       0);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
554
555
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * 128-bit aligned 64-bit or 128-bit memory accessed for SSE.
 *
 * Exceptions type 4.
 *
 * @param   pfnU128     Instruction specific worker operating on two 128-bit
 *                      operands; the destination is read-modify-write.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U,     puDst, 0);
        IEM_MC_ARG(PCRTUINT128U,    puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,                 puDst,       0);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
615
616
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 *
 * @param   pfnU64  Instruction specific worker operating on two 64-bit MMX
 *                  values; the destination is read-modify-write.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          puDst, 0);
        IEM_MC_ARG(uint64_t const *,    puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  puDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
675
676
/**
 * Common worker for SSE instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 *
 * @param   pfnU128     Instruction specific worker operating on two 128-bit
 *                      operands; the destination is read-modify-write.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U,     puDst, 0);
        IEM_MC_ARG(PCRTUINT128U,    puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U,                 puDst,       0);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
736
737
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @param   pfnU128     Instruction specific worker; gets the FXSAVE state, a
 *                      result buffer and the two 128-bit source operands.
 *                      The result is written back after the SIMD FP exception
 *                      check (see below).
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG,             pSrc1,  1);
        IEM_MC_ARG(PCX86XMMREG,             pSrc2,  2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        /* Commit the result, then raise \#XF/\#UD if the worker flagged unmasked SIMD FP exceptions. */
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
        IEM_MC_LOCAL(X86XMMREG,             uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes,   0);
        IEM_MC_ARG(PCX86XMMREG,             pSrc1,              1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG,   pSrc2, uSrc2,       2);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
801
802
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @param   pfnU128_R32     Instruction specific worker; gets the FXSAVE
 *                          state, a result buffer, the first 128-bit operand
 *                          and the second operand as a 32-bit floating point
 *                          value.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG,             pSrc1,  1);
        IEM_MC_ARG(PCRTFLOAT32U,            pSrc2,  2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
        IEM_MC_LOCAL(RTFLOAT32U,            r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes,  SseRes,   0);
        IEM_MC_ARG(PCX86XMMREG,             pSrc1,              1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Src2, r32Src2,  2);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Scalar form: only a 32-bit memory read, no 128-bit alignment restriction. */
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
866
867
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @param   pfnU128     Instruction specific worker; gets the FXSAVE state, a
 *                      result buffer and the two 128-bit source operands.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG,             pSrc1,  1);
        IEM_MC_ARG(PCX86XMMREG,             pSrc2,  2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT,          SseRes);
        IEM_MC_LOCAL(X86XMMREG,             uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes,   0);
        IEM_MC_ARG(PCX86XMMREG,             pSrc1,              1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG,   pSrc2, uSrc2,       2);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
931
932
933/**
934 * Common worker for SSE2 instructions on the forms:
935 * pxxs xmm1, xmm2/mem64
936 *
937 * Proper alignment of the 128-bit operand is enforced.
938 * Exceptions type 2. SSE2 cpuid checks.
939 *
940 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
941 */
942FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
943{
944 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
945 if (IEM_IS_MODRM_REG_MODE(bRm))
946 {
947 /*
948 * Register, register.
949 */
950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
951 IEM_MC_BEGIN(3, 1);
952 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
953 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
954 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
955 IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
956 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
957 IEM_MC_PREPARE_SSE_USAGE();
958 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
959 IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
960 IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
961 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
962 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
963
964 IEM_MC_ADVANCE_RIP();
965 IEM_MC_END();
966 }
967 else
968 {
969 /*
970 * Register, memory.
971 */
972 IEM_MC_BEGIN(3, 2);
973 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
974 IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
975 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
976 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
977 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
979
980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
982 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
983 IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
984
985 IEM_MC_PREPARE_SSE_USAGE();
986 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
987 IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
988 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
989 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
990
991 IEM_MC_ADVANCE_RIP();
992 IEM_MC_END();
993 }
994 return VINF_SUCCESS;
995}
996
997
998/**
999 * Common worker for SSE2 instructions on the form:
1000 * pxxxx xmm1, xmm2/mem128
1001 *
1002 * The 2nd operand is the second half of a register, which for SSE a 128-bit
1003 * aligned access where it may read the full 128 bits or only the upper 64 bits.
1004 *
1005 * Exceptions type 4.
1006 */
1007FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
1008{
1009 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1010 if (IEM_IS_MODRM_REG_MODE(bRm))
1011 {
1012 /*
1013 * Register, register.
1014 */
1015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1016 IEM_MC_BEGIN(2, 0);
1017 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1018 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1019 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1020 IEM_MC_PREPARE_SSE_USAGE();
1021 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1022 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1023 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
1024 IEM_MC_ADVANCE_RIP();
1025 IEM_MC_END();
1026 }
1027 else
1028 {
1029 /*
1030 * Register, memory.
1031 */
1032 IEM_MC_BEGIN(2, 2);
1033 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1034 IEM_MC_LOCAL(RTUINT128U, uSrc);
1035 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1037
1038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1040 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1041 /** @todo Most CPUs probably only read the high qword. We read everything to
1042 * make sure we apply segmentation and alignment checks correctly.
1043 * When we have time, it would be interesting to explore what real
1044 * CPUs actually does and whether it will do a TLB load for the lower
1045 * part or skip any associated \#PF. */
1046 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1047
1048 IEM_MC_PREPARE_SSE_USAGE();
1049 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1050 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
1051
1052 IEM_MC_ADVANCE_RIP();
1053 IEM_MC_END();
1054 }
1055 return VINF_SUCCESS;
1056}
1057
1058
1059/**
1060 * Common worker for SSE3 instructions on the forms:
1061 * hxxx xmm1, xmm2/mem128
1062 *
1063 * Proper alignment of the 128-bit operand is enforced.
1064 * Exceptions type 2. SSE3 cpuid checks.
1065 *
1066 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
1067 */
1068FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
1069{
1070 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1071 if (IEM_IS_MODRM_REG_MODE(bRm))
1072 {
1073 /*
1074 * Register, register.
1075 */
1076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1077 IEM_MC_BEGIN(3, 1);
1078 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
1079 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
1080 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
1081 IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
1082 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1083 IEM_MC_PREPARE_SSE_USAGE();
1084 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
1085 IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1086 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
1087 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
1088 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1089
1090 IEM_MC_ADVANCE_RIP();
1091 IEM_MC_END();
1092 }
1093 else
1094 {
1095 /*
1096 * Register, memory.
1097 */
1098 IEM_MC_BEGIN(3, 2);
1099 IEM_MC_LOCAL(IEMSSERESULT, SseRes);
1100 IEM_MC_LOCAL(X86XMMREG, uSrc2);
1101 IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
1102 IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
1103 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
1104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1105
1106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1108 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
1109 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1110
1111 IEM_MC_PREPARE_SSE_USAGE();
1112 IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
1113 IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
1114 IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
1115 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1116
1117 IEM_MC_ADVANCE_RIP();
1118 IEM_MC_END();
1119 }
1120 return VINF_SUCCESS;
1121}
1122
1123
/** Opcode 0x0f 0x00 /0. Store local descriptor table register (SLDT). */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operand size matters, handled by the C impl. */
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg,               0);
    IEM_MC_ARG(RTGCPTR,  GCPtrEffDst,           1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1148
1149
/** Opcode 0x0f 0x00 /1. Store task register (STR). */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();


    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operand size matters, handled by the C impl. */
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg,               0);
    IEM_MC_ARG(RTGCPTR,  GCPtrEffDst,           1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1175
1176
/** Opcode 0x0f 0x00 /2. Load local descriptor table register (LLDT). */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* CPL/privilege checks are performed inside iemCImpl_lldt. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1207
1208
/** Opcode 0x0f 0x00 /3. Load task register (LTR). */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* CPL/privilege checks are performed inside iemCImpl_ltr. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1239
1240
1241/** Opcode 0x0f 0x00 /3. */
1242FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
1243{
1244 IEMOP_HLP_MIN_286();
1245 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1246
1247 if (IEM_IS_MODRM_REG_MODE(bRm))
1248 {
1249 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1250 IEM_MC_BEGIN(2, 0);
1251 IEM_MC_ARG(uint16_t, u16Sel, 0);
1252 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1253 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1254 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
1255 IEM_MC_END();
1256 }
1257 else
1258 {
1259 IEM_MC_BEGIN(2, 1);
1260 IEM_MC_ARG(uint16_t, u16Sel, 0);
1261 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1264 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1265 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1266 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
1267 IEM_MC_END();
1268 }
1269 return VINF_SUCCESS;
1270}
1271
1272
/** Opcode 0x0f 0x00 /4. Verify a segment for reading (VERR). */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    /* fWrite=false selects the VERR behavior in the common worker. */
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
1280
1281
1282/** Opcode 0x0f 0x00 /5. */
1283FNIEMOPRM_DEF(iemOp_Grp6_verw)
1284{
1285 IEMOP_MNEMONIC(verw, "verw Ew");
1286 IEMOP_HLP_MIN_286();
1287 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
1288}
1289
1290
1291/**
1292 * Group 6 jump table.
1293 */
1294IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
1295{
1296 iemOp_Grp6_sldt,
1297 iemOp_Grp6_str,
1298 iemOp_Grp6_lldt,
1299 iemOp_Grp6_ltr,
1300 iemOp_Grp6_verr,
1301 iemOp_Grp6_verw,
1302 iemOp_InvalidWithRM,
1303 iemOp_InvalidWithRM
1304};
1305
/** Opcode 0x0f 0x00. Dispatches on the ModR/M reg field via g_apfnGroup6. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
1312
1313
/** Opcode 0x0f 0x01 /0. Store GDTR (SGDT) - memory operand only. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1330
1331
/** Opcode 0x0f 0x01 /0 (modrm 0xc1). VMX VMCALL / generic hypercall. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}
1344
1345
/** Opcode 0x0f 0x01 /0 (modrm 0xc2). VMX VMLAUNCH; stubbed to \#UD when nested
 *  VMX support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1363
1364
/** Opcode 0x0f 0x01 /0 (modrm 0xc3). VMX VMRESUME; stubbed to \#UD when nested
 *  VMX support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1382
1383
/** Opcode 0x0f 0x01 /0 (modrm 0xc4). VMX VMXOFF; stubbed to \#UD when nested
 *  VMX support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1401
1402
/** Opcode 0x0f 0x01 /1. Store IDTR (SIDT) - memory operand only. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1419
1420
/** Opcode 0x0f 0x01 /1 (modrm 0xc8). MONITOR; address taken from the effective
 *  segment (DS by default, overridable). */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
1428
1429
/** Opcode 0x0f 0x01 /1 (modrm 0xc9). MWAIT. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
1437
1438
/** Opcode 0x0f 0x01 /2. Load GDTR (LGDT) - memory operand only. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    /* Operand size selects between 24-bit and 32-bit base in the C impl. */
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1455
1456
/** Opcode 0x0f 0x01 0xd0. Read extended control register (XGETBV);
 *  \#UD unless the guest CPU reports XSAVE/XRSTOR support. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1471
1472
/** Opcode 0x0f 0x01 0xd1. Write extended control register (XSETBV);
 *  \#UD unless the guest CPU reports XSAVE/XRSTOR support. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1487
1488
/** Opcode 0x0f 0x01 /3. Load IDTR (LIDT) - memory operand only. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    /* In 64-bit mode the operand size is forced to 64-bit. */
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/enmEffOpSize,          2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1507
1508
/** Opcode 0x0f 0x01 0xd8. AMD-V VMRUN; \#UD stub when nested SVM support is
 *  not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif
1520
/** Opcode 0x0f 0x01 0xd9. AMD VMMCALL / generic hypercall. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}
1533
/** Opcode 0x0f 0x01 0xda. AMD-V VMLOAD; \#UD stub when nested SVM support is
 *  not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif
1545
1546
/** Opcode 0x0f 0x01 0xdb. AMD-V VMSAVE; \#UD stub when nested SVM support is
 *  not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif
1558
1559
/** Opcode 0x0f 0x01 0xdc. AMD-V STGI (set global interrupt flag); \#UD stub
 *  when nested SVM support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif
1571
1572
/** Opcode 0x0f 0x01 0xdd. AMD-V CLGI (clear global interrupt flag); \#UD stub
 *  when nested SVM support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif
1584
1585
/** Opcode 0x0f 0x01 0xdf. AMD-V INVLPGA; \#UD stub when nested SVM support is
 *  not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif
1597
1598
/** Opcode 0x0f 0x01 0xde. AMD SKINIT (secure init); \#UD stub when nested SVM
 *  support is not compiled in. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif
1610
1611
/** Opcode 0x0f 0x01 /4. Store machine status word (SMSW). */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg,               0);
    IEM_MC_ARG(RTGCPTR,  GCPtrEffDst,           1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1634
1635
/** Opcode 0x0f 0x01 /6. Load machine status word (LMSW). */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t,        u16Tmp,                     0);
        /* No memory operand in register mode, so pass a NIL pointer. */
        IEM_MC_ARG_CONST(RTGCPTR,   GCPtrEffDst, NIL_RTGCPTR,   1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t,        u16Tmp,                     0);
        IEM_MC_ARG(RTGCPTR,         GCPtrEffDst,                1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1666
1667
/** Opcode 0x0f 0x01 /7. Invalidate TLB entry (INVLPG) - memory operand only. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1681
1682
/** Opcode 0x0f 0x01 /7 (modrm 0xf8). SWAPGS - 64-bit mode only. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1691
1692
/** Opcode 0x0f 0x01 /7 (modrm 0xf9). RDTSCP. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}
1700
1701
1702/**
1703 * Group 7 jump table, memory variant.
1704 */
1705IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1706{
1707 iemOp_Grp7_sgdt,
1708 iemOp_Grp7_sidt,
1709 iemOp_Grp7_lgdt,
1710 iemOp_Grp7_lidt,
1711 iemOp_Grp7_smsw,
1712 iemOp_InvalidWithRM,
1713 iemOp_Grp7_lmsw,
1714 iemOp_Grp7_invlpg
1715};
1716
1717
/** Opcode 0x0f 0x01.
 *
 * Memory forms dispatch through g_apfnGroup7Mem; register forms are decoded
 * by reg (and where needed r/m) field below, as each /n slot encodes several
 * distinct register-only instructions. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            /* All eight r/m values are defined (AMD SVM instructions). */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1787
/** Common worker for LAR and LSL (opcodes 0x0f 0x02 and 0x0f 0x03).
 *
 * @param   fIsLar  true for LAR (load access rights), false for LSL (load
 *                  segment limit).  The selector is always read as 16 bits;
 *                  32-bit and 64-bit destinations share the 64-bit C impl. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                /* 32-bit and 64-bit share the u64 worker; it handles the
                   zero-extension of the 32-bit destination. */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1881
1882
1883
/** Opcode 0x0f 0x02. Load access rights byte (LAR). */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true /*fIsLar*/);
}
1890
1891
/** Opcode 0x0f 0x03. Load segment limit (LSL). */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false /*fIsLar*/);
}
1898
1899
/** Opcode 0x0f 0x05. SYSCALL. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL   */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1907
1908
/** Opcode 0x0f 0x06. Clear task-switched flag in CR0 (CLTS). */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1916
1917
/** Opcode 0x0f 0x07. SYSRET. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret");  /** @todo 386 LOADALL   */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1925
1926
/** Opcode 0x0f 0x08. Invalidate caches without write-back (INVD). */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}
1935
1936
/** Opcode 0x0f 0x09. Write back and invalidate caches (WBINVD). */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}
1945
1946
/** Opcode 0x0f 0x0b. UD2 - guaranteed \#UD. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1953
/** Opcode 0x0f 0x0d. AMD 3DNow! PREFETCH/PREFETCHW group (GrpP). */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register form is undefined; prefetches take a memory operand. */
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* The effective address is still calculated so addressing faults behave
       like on real hardware, but the prefetch itself is a no-op here. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1994
1995
/** Opcode 0x0f 0x0e. AMD 3DNow! FEMMS - fast exit from MMX state. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Leaves MMX mode, making the FPU state available again. */
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2011
2012
/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    /* 3DNow! encodes the actual operation in an imm8-style suffix byte following the operands. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
2031
2032
2033/**
2034 * @opcode 0x10
2035 * @oppfx none
2036 * @opcpuid sse
2037 * @opgroup og_sse_simdfp_datamove
2038 * @opxcpttype 4UA
2039 * @optest op1=1 op2=2 -> op1=2
2040 * @optest op1=0 op2=-22 -> op1=-22
2041 */
2042FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2043{
2044 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2045 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2046 if (IEM_IS_MODRM_REG_MODE(bRm))
2047 {
2048 /*
2049 * Register, register.
2050 */
2051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2052 IEM_MC_BEGIN(0, 0);
2053 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2054 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2055 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2056 IEM_GET_MODRM_RM(pVCpu, bRm));
2057 IEM_MC_ADVANCE_RIP();
2058 IEM_MC_END();
2059 }
2060 else
2061 {
2062 /*
2063 * Memory, register.
2064 */
2065 IEM_MC_BEGIN(0, 2);
2066 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2068
2069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2071 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2072 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2073
2074 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2075 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2076
2077 IEM_MC_ADVANCE_RIP();
2078 IEM_MC_END();
2079 }
2080 return VINF_SUCCESS;
2081
2082}
2083
2084
2085/**
2086 * @opcode 0x10
2087 * @oppfx 0x66
2088 * @opcpuid sse2
2089 * @opgroup og_sse2_pcksclr_datamove
2090 * @opxcpttype 4UA
2091 * @optest op1=1 op2=2 -> op1=2
2092 * @optest op1=0 op2=-42 -> op1=-42
2093 */
2094FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2095{
2096 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2097 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2098 if (IEM_IS_MODRM_REG_MODE(bRm))
2099 {
2100 /*
2101 * Register, register.
2102 */
2103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2104 IEM_MC_BEGIN(0, 0);
2105 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2106 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2107 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2108 IEM_GET_MODRM_RM(pVCpu, bRm));
2109 IEM_MC_ADVANCE_RIP();
2110 IEM_MC_END();
2111 }
2112 else
2113 {
2114 /*
2115 * Memory, register.
2116 */
2117 IEM_MC_BEGIN(0, 2);
2118 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2120
2121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2123 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2124 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2125
2126 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2127 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2128
2129 IEM_MC_ADVANCE_RIP();
2130 IEM_MC_END();
2131 }
2132 return VINF_SUCCESS;
2133}
2134
2135
2136/**
2137 * @opcode 0x10
2138 * @oppfx 0xf3
2139 * @opcpuid sse
2140 * @opgroup og_sse_simdfp_datamove
2141 * @opxcpttype 5
2142 * @optest op1=1 op2=2 -> op1=2
2143 * @optest op1=0 op2=-22 -> op1=-22
2144 */
2145FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2146{
2147 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2148 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2149 if (IEM_IS_MODRM_REG_MODE(bRm))
2150 {
2151 /*
2152 * Register, register.
2153 */
2154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2155 IEM_MC_BEGIN(0, 1);
2156 IEM_MC_LOCAL(uint32_t, uSrc);
2157
2158 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2159 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2160 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2161 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2162
2163 IEM_MC_ADVANCE_RIP();
2164 IEM_MC_END();
2165 }
2166 else
2167 {
2168 /*
2169 * Memory, register.
2170 */
2171 IEM_MC_BEGIN(0, 2);
2172 IEM_MC_LOCAL(uint32_t, uSrc);
2173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2174
2175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2177 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2178 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2179
2180 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2181 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2182
2183 IEM_MC_ADVANCE_RIP();
2184 IEM_MC_END();
2185 }
2186 return VINF_SUCCESS;
2187}
2188
2189
2190/**
2191 * @opcode 0x10
2192 * @oppfx 0xf2
2193 * @opcpuid sse2
2194 * @opgroup og_sse2_pcksclr_datamove
2195 * @opxcpttype 5
2196 * @optest op1=1 op2=2 -> op1=2
2197 * @optest op1=0 op2=-42 -> op1=-42
2198 */
2199FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2200{
2201 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2202 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2203 if (IEM_IS_MODRM_REG_MODE(bRm))
2204 {
2205 /*
2206 * Register, register.
2207 */
2208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2209 IEM_MC_BEGIN(0, 1);
2210 IEM_MC_LOCAL(uint64_t, uSrc);
2211
2212 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2213 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2214 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2215 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2216
2217 IEM_MC_ADVANCE_RIP();
2218 IEM_MC_END();
2219 }
2220 else
2221 {
2222 /*
2223 * Memory, register.
2224 */
2225 IEM_MC_BEGIN(0, 2);
2226 IEM_MC_LOCAL(uint64_t, uSrc);
2227 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2228
2229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2231 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2232 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2233
2234 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2235 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2236
2237 IEM_MC_ADVANCE_RIP();
2238 IEM_MC_END();
2239 }
2240 return VINF_SUCCESS;
2241}
2242
2243
2244/**
2245 * @opcode 0x11
2246 * @oppfx none
2247 * @opcpuid sse
2248 * @opgroup og_sse_simdfp_datamove
2249 * @opxcpttype 4UA
2250 * @optest op1=1 op2=2 -> op1=2
2251 * @optest op1=0 op2=-42 -> op1=-42
2252 */
2253FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2254{
2255 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2257 if (IEM_IS_MODRM_REG_MODE(bRm))
2258 {
2259 /*
2260 * Register, register.
2261 */
2262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2263 IEM_MC_BEGIN(0, 0);
2264 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2265 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2266 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2267 IEM_GET_MODRM_REG(pVCpu, bRm));
2268 IEM_MC_ADVANCE_RIP();
2269 IEM_MC_END();
2270 }
2271 else
2272 {
2273 /*
2274 * Memory, register.
2275 */
2276 IEM_MC_BEGIN(0, 2);
2277 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2279
2280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2282 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2283 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2284
2285 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2286 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2287
2288 IEM_MC_ADVANCE_RIP();
2289 IEM_MC_END();
2290 }
2291 return VINF_SUCCESS;
2292}
2293
2294
2295/**
2296 * @opcode 0x11
2297 * @oppfx 0x66
2298 * @opcpuid sse2
2299 * @opgroup og_sse2_pcksclr_datamove
2300 * @opxcpttype 4UA
2301 * @optest op1=1 op2=2 -> op1=2
2302 * @optest op1=0 op2=-42 -> op1=-42
2303 */
2304FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2305{
2306 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2308 if (IEM_IS_MODRM_REG_MODE(bRm))
2309 {
2310 /*
2311 * Register, register.
2312 */
2313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2314 IEM_MC_BEGIN(0, 0);
2315 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2316 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2317 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2318 IEM_GET_MODRM_REG(pVCpu, bRm));
2319 IEM_MC_ADVANCE_RIP();
2320 IEM_MC_END();
2321 }
2322 else
2323 {
2324 /*
2325 * Memory, register.
2326 */
2327 IEM_MC_BEGIN(0, 2);
2328 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2330
2331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2333 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2334 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2335
2336 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2337 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2338
2339 IEM_MC_ADVANCE_RIP();
2340 IEM_MC_END();
2341 }
2342 return VINF_SUCCESS;
2343}
2344
2345
2346/**
2347 * @opcode 0x11
2348 * @oppfx 0xf3
2349 * @opcpuid sse
2350 * @opgroup og_sse_simdfp_datamove
2351 * @opxcpttype 5
2352 * @optest op1=1 op2=2 -> op1=2
2353 * @optest op1=0 op2=-22 -> op1=-22
2354 */
2355FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2356{
2357 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2358 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2359 if (IEM_IS_MODRM_REG_MODE(bRm))
2360 {
2361 /*
2362 * Register, register.
2363 */
2364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2365 IEM_MC_BEGIN(0, 1);
2366 IEM_MC_LOCAL(uint32_t, uSrc);
2367
2368 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2369 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2370 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2371 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2372
2373 IEM_MC_ADVANCE_RIP();
2374 IEM_MC_END();
2375 }
2376 else
2377 {
2378 /*
2379 * Memory, register.
2380 */
2381 IEM_MC_BEGIN(0, 2);
2382 IEM_MC_LOCAL(uint32_t, uSrc);
2383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2384
2385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2387 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2388 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2389
2390 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2391 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2392
2393 IEM_MC_ADVANCE_RIP();
2394 IEM_MC_END();
2395 }
2396 return VINF_SUCCESS;
2397}
2398
2399
2400/**
2401 * @opcode 0x11
2402 * @oppfx 0xf2
2403 * @opcpuid sse2
2404 * @opgroup og_sse2_pcksclr_datamove
2405 * @opxcpttype 5
2406 * @optest op1=1 op2=2 -> op1=2
2407 * @optest op1=0 op2=-42 -> op1=-42
2408 */
2409FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2410{
2411 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2413 if (IEM_IS_MODRM_REG_MODE(bRm))
2414 {
2415 /*
2416 * Register, register.
2417 */
2418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2419 IEM_MC_BEGIN(0, 1);
2420 IEM_MC_LOCAL(uint64_t, uSrc);
2421
2422 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2423 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2424 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2425 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2426
2427 IEM_MC_ADVANCE_RIP();
2428 IEM_MC_END();
2429 }
2430 else
2431 {
2432 /*
2433 * Memory, register.
2434 */
2435 IEM_MC_BEGIN(0, 2);
2436 IEM_MC_LOCAL(uint64_t, uSrc);
2437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2438
2439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2441 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2442 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2443
2444 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2445 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2446
2447 IEM_MC_ADVANCE_RIP();
2448 IEM_MC_END();
2449 }
2450 return VINF_SUCCESS;
2451}
2452
2453
/* Opcode 0x0f 0x12: MOVHLPS (register form) / MOVLPS (memory form). */
FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t,                  uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MOVHLPS: high qword of the source goes into the low qword of the destination. */
        IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movlps_Vq_Mq__vmovhlps
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* MOVLPS: memory qword replaces the low qword; high qword is preserved. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2516
2517
2518/**
2519 * @opcode 0x12
2520 * @opcodesub !11 mr/reg
2521 * @oppfx 0x66
2522 * @opcpuid sse2
2523 * @opgroup og_sse2_pcksclr_datamove
2524 * @opxcpttype 5
2525 * @optest op1=1 op2=2 -> op1=2
2526 * @optest op1=0 op2=-42 -> op1=-42
2527 */
2528FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2529{
2530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2531 if (IEM_IS_MODRM_MEM_MODE(bRm))
2532 {
2533 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2534
2535 IEM_MC_BEGIN(0, 2);
2536 IEM_MC_LOCAL(uint64_t, uSrc);
2537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2538
2539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2541 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2542 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2543
2544 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2545 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2546
2547 IEM_MC_ADVANCE_RIP();
2548 IEM_MC_END();
2549 return VINF_SUCCESS;
2550 }
2551
2552 /**
2553 * @opdone
2554 * @opmnemonic ud660f12m3
2555 * @opcode 0x12
2556 * @opcodesub 11 mr/reg
2557 * @oppfx 0x66
2558 * @opunused immediate
2559 * @opcpuid sse
2560 * @optest ->
2561 */
2562 return IEMOP_RAISE_INVALID_OPCODE();
2563}
2564
2565
2566/**
2567 * @opcode 0x12
2568 * @oppfx 0xf3
2569 * @opcpuid sse3
2570 * @opgroup og_sse3_pcksclr_datamove
2571 * @opxcpttype 4
2572 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2573 * op1=0x00000002000000020000000100000001
2574 */
2575FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2576{
2577 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2579 if (IEM_IS_MODRM_REG_MODE(bRm))
2580 {
2581 /*
2582 * Register, register.
2583 */
2584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2585 IEM_MC_BEGIN(2, 0);
2586 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2587 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2588
2589 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2590 IEM_MC_PREPARE_SSE_USAGE();
2591
2592 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2593 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2594 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2595
2596 IEM_MC_ADVANCE_RIP();
2597 IEM_MC_END();
2598 }
2599 else
2600 {
2601 /*
2602 * Register, memory.
2603 */
2604 IEM_MC_BEGIN(2, 2);
2605 IEM_MC_LOCAL(RTUINT128U, uSrc);
2606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2607 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2608 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2609
2610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2612 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2613 IEM_MC_PREPARE_SSE_USAGE();
2614
2615 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2616 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2617 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2618
2619 IEM_MC_ADVANCE_RIP();
2620 IEM_MC_END();
2621 }
2622 return VINF_SUCCESS;
2623}
2624
2625
2626/**
2627 * @opcode 0x12
2628 * @oppfx 0xf2
2629 * @opcpuid sse3
2630 * @opgroup og_sse3_pcksclr_datamove
2631 * @opxcpttype 5
2632 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2633 * op1=0x22222222111111112222222211111111
2634 */
2635FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2636{
2637 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2639 if (IEM_IS_MODRM_REG_MODE(bRm))
2640 {
2641 /*
2642 * Register, register.
2643 */
2644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2645 IEM_MC_BEGIN(2, 0);
2646 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2647 IEM_MC_ARG(uint64_t, uSrc, 1);
2648
2649 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2650 IEM_MC_PREPARE_SSE_USAGE();
2651
2652 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2653 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2654 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2655
2656 IEM_MC_ADVANCE_RIP();
2657 IEM_MC_END();
2658 }
2659 else
2660 {
2661 /*
2662 * Register, memory.
2663 */
2664 IEM_MC_BEGIN(2, 2);
2665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2666 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2667 IEM_MC_ARG(uint64_t, uSrc, 1);
2668
2669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2671 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2672 IEM_MC_PREPARE_SSE_USAGE();
2673
2674 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2675 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2676 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2677
2678 IEM_MC_ADVANCE_RIP();
2679 IEM_MC_END();
2680 }
2681 return VINF_SUCCESS;
2682}
2683
2684
2685/**
2686 * @opcode 0x13
2687 * @opcodesub !11 mr/reg
2688 * @oppfx none
2689 * @opcpuid sse
2690 * @opgroup og_sse_simdfp_datamove
2691 * @opxcpttype 5
2692 * @optest op1=1 op2=2 -> op1=2
2693 * @optest op1=0 op2=-42 -> op1=-42
2694 */
2695FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2696{
2697 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2698 if (IEM_IS_MODRM_MEM_MODE(bRm))
2699 {
2700 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2701
2702 IEM_MC_BEGIN(0, 2);
2703 IEM_MC_LOCAL(uint64_t, uSrc);
2704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2705
2706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2708 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2709 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2710
2711 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2712 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2713
2714 IEM_MC_ADVANCE_RIP();
2715 IEM_MC_END();
2716 return VINF_SUCCESS;
2717 }
2718
2719 /**
2720 * @opdone
2721 * @opmnemonic ud0f13m3
2722 * @opcode 0x13
2723 * @opcodesub 11 mr/reg
2724 * @oppfx none
2725 * @opunused immediate
2726 * @opcpuid sse
2727 * @optest ->
2728 */
2729 return IEMOP_RAISE_INVALID_OPCODE();
2730}
2731
2732
2733/**
2734 * @opcode 0x13
2735 * @opcodesub !11 mr/reg
2736 * @oppfx 0x66
2737 * @opcpuid sse2
2738 * @opgroup og_sse2_pcksclr_datamove
2739 * @opxcpttype 5
2740 * @optest op1=1 op2=2 -> op1=2
2741 * @optest op1=0 op2=-42 -> op1=-42
2742 */
2743FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2744{
2745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2746 if (IEM_IS_MODRM_MEM_MODE(bRm))
2747 {
2748 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2749 IEM_MC_BEGIN(0, 2);
2750 IEM_MC_LOCAL(uint64_t, uSrc);
2751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2752
2753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2755 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2756 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2757
2758 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2759 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2760
2761 IEM_MC_ADVANCE_RIP();
2762 IEM_MC_END();
2763 return VINF_SUCCESS;
2764 }
2765
2766 /**
2767 * @opdone
2768 * @opmnemonic ud660f13m3
2769 * @opcode 0x13
2770 * @opcodesub 11 mr/reg
2771 * @oppfx 0x66
2772 * @opunused immediate
2773 * @opcpuid sse
2774 * @optest ->
2775 */
2776 return IEMOP_RAISE_INVALID_OPCODE();
2777}
2778
2779
2780/**
2781 * @opmnemonic udf30f13
2782 * @opcode 0x13
2783 * @oppfx 0xf3
2784 * @opunused intel-modrm
2785 * @opcpuid sse
2786 * @optest ->
2787 * @opdone
2788 */
2789
2790/**
2791 * @opmnemonic udf20f13
2792 * @opcode 0x13
2793 * @oppfx 0xf2
2794 * @opunused intel-modrm
2795 * @opcpuid sse
2796 * @optest ->
2797 * @opdone
2798 */
2799
/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* Interleaving of the low halves is handled by the common SSE low-low worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
}
2806
2807
/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* Interleaving of the low halves is handled by the common SSE2 low-low worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
}
2814
2815
2816/**
2817 * @opdone
2818 * @opmnemonic udf30f14
2819 * @opcode 0x14
2820 * @oppfx 0xf3
2821 * @opunused intel-modrm
2822 * @opcpuid sse
2823 * @optest ->
2824 * @opdone
2825 */
2826
2827/**
2828 * @opmnemonic udf20f14
2829 * @opcode 0x14
2830 * @oppfx 0xf2
2831 * @opunused intel-modrm
2832 * @opcpuid sse
2833 * @optest ->
2834 * @opdone
2835 */
2836
/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* Interleaving of the high halves is handled by the common SSE high-high worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
}
2843
2844
/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* Interleaving of the high halves is handled by the common SSE2 high-high worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
}
2851
2852
2853/* Opcode 0xf3 0x0f 0x15 - invalid */
2854/* Opcode 0xf2 0x0f 0x15 - invalid */
2855
2856/**
2857 * @opdone
2858 * @opmnemonic udf30f15
2859 * @opcode 0x15
2860 * @oppfx 0xf3
2861 * @opunused intel-modrm
2862 * @opcpuid sse
2863 * @optest ->
2864 * @opdone
2865 */
2866
2867/**
2868 * @opmnemonic udf20f15
2869 * @opcode 0x15
2870 * @oppfx 0xf2
2871 * @opunused intel-modrm
2872 * @opcpuid sse
2873 * @optest ->
2874 * @opdone
2875 */
2876
/* Opcode 0x0f 0x16: MOVLHPS (register form) / MOVHPS (memory form). */
FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t,                  uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* MOVLHPS: low qword of the source goes into the high qword of the destination. */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     sse
         * @opgroup     og_sse_simdfp_datamove
         * @opxcpttype  5
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         * @opfunction  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
         */
        IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* MOVHPS: memory qword replaces the high qword; low qword is preserved. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2939
2940
2941/**
2942 * @opcode 0x16
2943 * @opcodesub !11 mr/reg
2944 * @oppfx 0x66
2945 * @opcpuid sse2
2946 * @opgroup og_sse2_pcksclr_datamove
2947 * @opxcpttype 5
2948 * @optest op1=1 op2=2 -> op1=2
2949 * @optest op1=0 op2=-42 -> op1=-42
2950 */
2951FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2952{
2953 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2954 if (IEM_IS_MODRM_MEM_MODE(bRm))
2955 {
2956 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2957 IEM_MC_BEGIN(0, 2);
2958 IEM_MC_LOCAL(uint64_t, uSrc);
2959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2960
2961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2963 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2964 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2965
2966 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2967 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2968
2969 IEM_MC_ADVANCE_RIP();
2970 IEM_MC_END();
2971 return VINF_SUCCESS;
2972 }
2973
2974 /**
2975 * @opdone
2976 * @opmnemonic ud660f16m3
2977 * @opcode 0x16
2978 * @opcodesub 11 mr/reg
2979 * @oppfx 0x66
2980 * @opunused immediate
2981 * @opcpuid sse
2982 * @optest ->
2983 */
2984 return IEMOP_RAISE_INVALID_OPCODE();
2985}
2986
2987
2988/**
2989 * @opcode 0x16
2990 * @oppfx 0xf3
2991 * @opcpuid sse3
2992 * @opgroup og_sse3_pcksclr_datamove
2993 * @opxcpttype 4
2994 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2995 * op1=0x00000002000000020000000100000001
2996 */
2997FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
2998{
2999 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3000 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3001 if (IEM_IS_MODRM_REG_MODE(bRm))
3002 {
3003 /*
3004 * Register, register.
3005 */
3006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3007 IEM_MC_BEGIN(2, 0);
3008 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3009 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3010
3011 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3012 IEM_MC_PREPARE_SSE_USAGE();
3013
3014 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3015 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3016 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3017
3018 IEM_MC_ADVANCE_RIP();
3019 IEM_MC_END();
3020 }
3021 else
3022 {
3023 /*
3024 * Register, memory.
3025 */
3026 IEM_MC_BEGIN(2, 2);
3027 IEM_MC_LOCAL(RTUINT128U, uSrc);
3028 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3029 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3030 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3031
3032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3034 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
3035 IEM_MC_PREPARE_SSE_USAGE();
3036
3037 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3038 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3039 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
3040
3041 IEM_MC_ADVANCE_RIP();
3042 IEM_MC_END();
3043 }
3044 return VINF_SUCCESS;
3045}
3046
3047/**
3048 * @opdone
3049 * @opmnemonic udf30f16
3050 * @opcode 0x16
3051 * @oppfx 0xf2
3052 * @opunused intel-modrm
3053 * @opcpuid sse
3054 * @optest ->
3055 * @opdone
3056 */
3057
3058
3059/**
3060 * @opcode 0x17
3061 * @opcodesub !11 mr/reg
3062 * @oppfx none
3063 * @opcpuid sse
3064 * @opgroup og_sse_simdfp_datamove
3065 * @opxcpttype 5
3066 * @optest op1=1 op2=2 -> op1=2
3067 * @optest op1=0 op2=-42 -> op1=-42
3068 */
3069FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3070{
3071 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3072 if (IEM_IS_MODRM_MEM_MODE(bRm))
3073 {
3074 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3075
3076 IEM_MC_BEGIN(0, 2);
3077 IEM_MC_LOCAL(uint64_t, uSrc);
3078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3079
3080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3082 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3083 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3084
3085 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3086 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3087
3088 IEM_MC_ADVANCE_RIP();
3089 IEM_MC_END();
3090 return VINF_SUCCESS;
3091 }
3092
3093 /**
3094 * @opdone
3095 * @opmnemonic ud0f17m3
3096 * @opcode 0x17
3097 * @opcodesub 11 mr/reg
3098 * @oppfx none
3099 * @opunused immediate
3100 * @opcpuid sse
3101 * @optest ->
3102 */
3103 return IEMOP_RAISE_INVALID_OPCODE();
3104}
3105
3106
3107/**
3108 * @opcode 0x17
3109 * @opcodesub !11 mr/reg
3110 * @oppfx 0x66
3111 * @opcpuid sse2
3112 * @opgroup og_sse2_pcksclr_datamove
3113 * @opxcpttype 5
3114 * @optest op1=1 op2=2 -> op1=2
3115 * @optest op1=0 op2=-42 -> op1=-42
3116 */
3117FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3118{
3119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3120 if (IEM_IS_MODRM_MEM_MODE(bRm))
3121 {
3122 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3123
3124 IEM_MC_BEGIN(0, 2);
3125 IEM_MC_LOCAL(uint64_t, uSrc);
3126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3127
3128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3130 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3131 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3132
3133 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3134 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3135
3136 IEM_MC_ADVANCE_RIP();
3137 IEM_MC_END();
3138 return VINF_SUCCESS;
3139 }
3140
3141 /**
3142 * @opdone
3143 * @opmnemonic ud660f17m3
3144 * @opcode 0x17
3145 * @opcodesub 11 mr/reg
3146 * @oppfx 0x66
3147 * @opunused immediate
3148 * @opcpuid sse
3149 * @optest ->
3150 */
3151 return IEMOP_RAISE_INVALID_OPCODE();
3152}
3153
3154
3155/**
3156 * @opdone
3157 * @opmnemonic udf30f17
3158 * @opcode 0x17
3159 * @oppfx 0xf3
3160 * @opunused intel-modrm
3161 * @opcpuid sse
3162 * @optest ->
3163 * @opdone
3164 */
3165
3166/**
3167 * @opmnemonic udf20f17
3168 * @opcode 0x17
3169 * @oppfx 0xf2
3170 * @opunused intel-modrm
3171 * @opcpuid sse
3172 * @optest ->
3173 * @opdone
3174 */
3175
3176
/** Opcode 0x0f 0x18 - group 16: prefetchNTA/T0/T1/T2 m8 (hints, currently NOPs). */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* The reg field of the ModR/M byte selects the prefetch variant. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Decode the address but don't access memory - prefetch is only a hint. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /* The register form is invalid. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
3209
3210
/** Opcode 0x0f 0x19..0x1f - multi-byte NOP with a ModR/M operand (nop Ev). */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register form: nothing to do at all. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory form: the effective address is decoded but never accessed. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3236
3237
/** Opcode 0x0f 0x20 - mov Rd,Cd: read a control register into a GPR. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* The operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are architecturally defined. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
}
3269
3270
/** Opcode 0x0f 0x21 - mov Rd,Dd: read a debug register into a GPR. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would select DR8..DR15, which do not exist -> #UD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}
3284
3285
/** Opcode 0x0f 0x22 - mov Cd,Rd: write a GPR into a control register. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* The operand size is forced: 64-bit in long mode, 32-bit otherwise. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are architecturally defined. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
}
3317
3318
/** Opcode 0x0f 0x23 - mov Dd,Rd: write a GPR into a debug register. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would select DR8..DR15, which do not exist -> #UD. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}
3332
3333
/** Opcode 0x0f 0x24 - mov Rd,Td: read a test register (386/486 only). */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Test registers were dropped with the Pentium; later CPUs raise #UD. */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
                                   IEM_GET_MODRM_RM(pVCpu, bRm),
                                   IEM_GET_MODRM_REG_8(bRm));
}
3347
3348
/** Opcode 0x0f 0x26 - mov Td,Rd: write a test register (386/486 only). */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Test registers were dropped with the Pentium; later CPUs raise #UD. */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
                                   IEM_GET_MODRM_REG_8(bRm),
                                   IEM_GET_MODRM_RM(pVCpu, bRm));
}
3362
3363
3364/**
3365 * @opcode 0x28
3366 * @oppfx none
3367 * @opcpuid sse
3368 * @opgroup og_sse_simdfp_datamove
3369 * @opxcpttype 1
3370 * @optest op1=1 op2=2 -> op1=2
3371 * @optest op1=0 op2=-42 -> op1=-42
3372 */
3373FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3374{
3375 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3377 if (IEM_IS_MODRM_REG_MODE(bRm))
3378 {
3379 /*
3380 * Register, register.
3381 */
3382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3383 IEM_MC_BEGIN(0, 0);
3384 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3385 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3386 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3387 IEM_GET_MODRM_RM(pVCpu, bRm));
3388 IEM_MC_ADVANCE_RIP();
3389 IEM_MC_END();
3390 }
3391 else
3392 {
3393 /*
3394 * Register, memory.
3395 */
3396 IEM_MC_BEGIN(0, 2);
3397 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3399
3400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3402 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3403 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3404
3405 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3406 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3407
3408 IEM_MC_ADVANCE_RIP();
3409 IEM_MC_END();
3410 }
3411 return VINF_SUCCESS;
3412}
3413
3414/**
3415 * @opcode 0x28
3416 * @oppfx 66
3417 * @opcpuid sse2
3418 * @opgroup og_sse2_pcksclr_datamove
3419 * @opxcpttype 1
3420 * @optest op1=1 op2=2 -> op1=2
3421 * @optest op1=0 op2=-42 -> op1=-42
3422 */
3423FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3424{
3425 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3426 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3427 if (IEM_IS_MODRM_REG_MODE(bRm))
3428 {
3429 /*
3430 * Register, register.
3431 */
3432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3433 IEM_MC_BEGIN(0, 0);
3434 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3435 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3436 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3437 IEM_GET_MODRM_RM(pVCpu, bRm));
3438 IEM_MC_ADVANCE_RIP();
3439 IEM_MC_END();
3440 }
3441 else
3442 {
3443 /*
3444 * Register, memory.
3445 */
3446 IEM_MC_BEGIN(0, 2);
3447 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3449
3450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3452 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3453 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3454
3455 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3456 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3457
3458 IEM_MC_ADVANCE_RIP();
3459 IEM_MC_END();
3460 }
3461 return VINF_SUCCESS;
3462}
3463
3464/* Opcode 0xf3 0x0f 0x28 - invalid */
3465/* Opcode 0xf2 0x0f 0x28 - invalid */
3466
3467/**
3468 * @opcode 0x29
3469 * @oppfx none
3470 * @opcpuid sse
3471 * @opgroup og_sse_simdfp_datamove
3472 * @opxcpttype 1
3473 * @optest op1=1 op2=2 -> op1=2
3474 * @optest op1=0 op2=-42 -> op1=-42
3475 */
3476FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3477{
3478 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3480 if (IEM_IS_MODRM_REG_MODE(bRm))
3481 {
3482 /*
3483 * Register, register.
3484 */
3485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3486 IEM_MC_BEGIN(0, 0);
3487 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3489 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3490 IEM_GET_MODRM_REG(pVCpu, bRm));
3491 IEM_MC_ADVANCE_RIP();
3492 IEM_MC_END();
3493 }
3494 else
3495 {
3496 /*
3497 * Memory, register.
3498 */
3499 IEM_MC_BEGIN(0, 2);
3500 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3502
3503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3505 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3506 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3507
3508 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3509 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3510
3511 IEM_MC_ADVANCE_RIP();
3512 IEM_MC_END();
3513 }
3514 return VINF_SUCCESS;
3515}
3516
3517/**
3518 * @opcode 0x29
3519 * @oppfx 66
3520 * @opcpuid sse2
3521 * @opgroup og_sse2_pcksclr_datamove
3522 * @opxcpttype 1
3523 * @optest op1=1 op2=2 -> op1=2
3524 * @optest op1=0 op2=-42 -> op1=-42
3525 */
3526FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3527{
3528 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3530 if (IEM_IS_MODRM_REG_MODE(bRm))
3531 {
3532 /*
3533 * Register, register.
3534 */
3535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3536 IEM_MC_BEGIN(0, 0);
3537 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3538 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3539 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3540 IEM_GET_MODRM_REG(pVCpu, bRm));
3541 IEM_MC_ADVANCE_RIP();
3542 IEM_MC_END();
3543 }
3544 else
3545 {
3546 /*
3547 * Memory, register.
3548 */
3549 IEM_MC_BEGIN(0, 2);
3550 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3552
3553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3555 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3556 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3557
3558 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3559 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3560
3561 IEM_MC_ADVANCE_RIP();
3562 IEM_MC_END();
3563 }
3564 return VINF_SUCCESS;
3565}
3566
3567/* Opcode 0xf3 0x0f 0x29 - invalid */
3568/* Opcode 0xf2 0x0f 0x29 - invalid */
3569
3570
/* 0x0f 0x2a: int -> float conversions - not yet implemented (stubs).
   Note: this is the non-VEX map, so the f3/f2 forms are cvtsi2ss/cvtsi2sd
   (the VEX forms with the Hss/Hsd operand live in IEMAllInstructionsVexMap1). */
/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
3579
3580
3581/**
3582 * @opcode 0x2b
3583 * @opcodesub !11 mr/reg
3584 * @oppfx none
3585 * @opcpuid sse
3586 * @opgroup og_sse1_cachect
3587 * @opxcpttype 1
3588 * @optest op1=1 op2=2 -> op1=2
3589 * @optest op1=0 op2=-42 -> op1=-42
3590 */
3591FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3592{
3593 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3594 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3595 if (IEM_IS_MODRM_MEM_MODE(bRm))
3596 {
3597 /*
3598 * memory, register.
3599 */
3600 IEM_MC_BEGIN(0, 2);
3601 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3603
3604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3606 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3607 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3608
3609 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3610 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3611
3612 IEM_MC_ADVANCE_RIP();
3613 IEM_MC_END();
3614 }
3615 /* The register, register encoding is invalid. */
3616 else
3617 return IEMOP_RAISE_INVALID_OPCODE();
3618 return VINF_SUCCESS;
3619}
3620
3621/**
3622 * @opcode 0x2b
3623 * @opcodesub !11 mr/reg
3624 * @oppfx 0x66
3625 * @opcpuid sse2
3626 * @opgroup og_sse2_cachect
3627 * @opxcpttype 1
3628 * @optest op1=1 op2=2 -> op1=2
3629 * @optest op1=0 op2=-42 -> op1=-42
3630 */
3631FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3632{
3633 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3634 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3635 if (IEM_IS_MODRM_MEM_MODE(bRm))
3636 {
3637 /*
3638 * memory, register.
3639 */
3640 IEM_MC_BEGIN(0, 2);
3641 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3643
3644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3646 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3647 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3648
3649 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3650 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3651
3652 IEM_MC_ADVANCE_RIP();
3653 IEM_MC_END();
3654 }
3655 /* The register, register encoding is invalid. */
3656 else
3657 return IEMOP_RAISE_INVALID_OPCODE();
3658 return VINF_SUCCESS;
3659}
3660/* Opcode 0xf3 0x0f 0x2b - invalid */
3661/* Opcode 0xf2 0x0f 0x2b - invalid */
3662
3663
/* 0x0f 0x2c..0x2f: truncating/rounding float -> int conversions and the
   (un)ordered scalar compares - not yet implemented (stubs). */
/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - comiss Vss, Wss */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */
3695
/** Opcode 0x0f 0x30 - wrmsr: write MSR, deferred to the C implementation. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
3703
3704
/** Opcode 0x0f 0x31 - rdtsc: read time-stamp counter, deferred to the C implementation. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
3712
3713
/** Opcode 0x0f 0x32 - rdmsr: read MSR, deferred to the C implementation.
 *  (Comment previously said 0x0f 0x33; RDMSR is encoded 0F 32 per the SDM.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
3721
3722
/** Opcode 0x0f 0x33 - rdpmc: read performance-monitoring counter, deferred to
 *  the C implementation.  (Comment previously said 0x0f 0x34; RDPMC is 0F 33.) */
FNIEMOP_DEF(iemOp_rdpmc)
{
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
}
3730
3731
/** Opcode 0x0f 0x34 - sysenter: fast system call, deferred to the C implementation. */
FNIEMOP_DEF(iemOp_sysenter)
{
    IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
}
3739
/** Opcode 0x0f 0x35 - sysexit: fast system-call return; the effective operand
 *  size is passed on since it selects the 32/64-bit return variant. */
FNIEMOP_DEF(iemOp_sysexit)
{
    IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
}
3747
/** Opcode 0x0f 0x37 - getsec (SMX); not yet implemented (stub). */
FNIEMOP_STUB(iemOp_getsec);
3750
3751
/** Opcode 0x0f 0x38 - escape byte for the three-byte 0f38 opcode map. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    /* Dispatch on the third opcode byte; the table has four entries per
       opcode, selected by the recorded mandatory-prefix index. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
3763
3764
/** Opcode 0x0f 0x3a - escape byte for the three-byte 0f3a opcode map. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    /* Dispatch on the third opcode byte; the table has four entries per
       opcode, selected by the recorded mandatory-prefix index. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
3776
3777
3778/**
3779 * Implements a conditional move.
3780 *
3781 * Wish there was an obvious way to do this where we could share and reduce
3782 * code bloat.
3783 *
3784 * @param a_Cnd The conditional "microcode" operation.
3785 */
3786#define CMOV_X(a_Cnd) \
3787 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
3788 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3789 { \
3790 switch (pVCpu->iem.s.enmEffOpSize) \
3791 { \
3792 case IEMMODE_16BIT: \
3793 IEM_MC_BEGIN(0, 1); \
3794 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3795 a_Cnd { \
3796 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3797 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3798 } IEM_MC_ENDIF(); \
3799 IEM_MC_ADVANCE_RIP(); \
3800 IEM_MC_END(); \
3801 return VINF_SUCCESS; \
3802 \
3803 case IEMMODE_32BIT: \
3804 IEM_MC_BEGIN(0, 1); \
3805 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3806 a_Cnd { \
3807 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3808 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3809 } IEM_MC_ELSE() { \
3810 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3811 } IEM_MC_ENDIF(); \
3812 IEM_MC_ADVANCE_RIP(); \
3813 IEM_MC_END(); \
3814 return VINF_SUCCESS; \
3815 \
3816 case IEMMODE_64BIT: \
3817 IEM_MC_BEGIN(0, 1); \
3818 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3819 a_Cnd { \
3820 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3821 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3822 } IEM_MC_ENDIF(); \
3823 IEM_MC_ADVANCE_RIP(); \
3824 IEM_MC_END(); \
3825 return VINF_SUCCESS; \
3826 \
3827 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3828 } \
3829 } \
3830 else \
3831 { \
3832 switch (pVCpu->iem.s.enmEffOpSize) \
3833 { \
3834 case IEMMODE_16BIT: \
3835 IEM_MC_BEGIN(0, 2); \
3836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3837 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3839 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3840 a_Cnd { \
3841 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3842 } IEM_MC_ENDIF(); \
3843 IEM_MC_ADVANCE_RIP(); \
3844 IEM_MC_END(); \
3845 return VINF_SUCCESS; \
3846 \
3847 case IEMMODE_32BIT: \
3848 IEM_MC_BEGIN(0, 2); \
3849 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3850 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3851 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3852 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3853 a_Cnd { \
3854 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3855 } IEM_MC_ELSE() { \
3856 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3857 } IEM_MC_ENDIF(); \
3858 IEM_MC_ADVANCE_RIP(); \
3859 IEM_MC_END(); \
3860 return VINF_SUCCESS; \
3861 \
3862 case IEMMODE_64BIT: \
3863 IEM_MC_BEGIN(0, 2); \
3864 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3865 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3867 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3868 a_Cnd { \
3869 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3870 } IEM_MC_ENDIF(); \
3871 IEM_MC_ADVANCE_RIP(); \
3872 IEM_MC_END(); \
3873 return VINF_SUCCESS; \
3874 \
3875 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3876 } \
3877 } do {} while (0)
3878
3879
3880
/** Opcode 0x0f 0x40 - cmovo Gv,Ev: move if overflow (OF=1). */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - cmovno Gv,Ev: move if not overflow (OF=0). */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - cmovc Gv,Ev: move if carry/below (CF=1). */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - cmovnc Gv,Ev: move if not carry (CF=0). */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - cmove Gv,Ev: move if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - cmovne Gv,Ev: move if not equal (ZF=0). */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - cmovbe Gv,Ev: move if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - cmovnbe Gv,Ev: move if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48 - cmovs Gv,Ev: move if sign (SF=1). */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - cmovns Gv,Ev: move if not sign (SF=0). */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - cmovp Gv,Ev: move if parity even (PF=1). */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - cmovnp Gv,Ev: move if parity odd (PF=0). */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - cmovl Gv,Ev: move if less (SF != OF). */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - cmovnl Gv,Ev: move if not less (SF == OF). */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - cmovle Gv,Ev: move if less or equal (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - cmovnle Gv,Ev: move if greater (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
4009
/* 0x0f 0x50: extract packed sign-bit masks into a GPR - not yet implemented (stubs). */
/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */
4016
4017
/* 0x0f 0x51: square root - thin decode wrappers dispatching to the common
   SSE/SSE2 FP workers with the matching assembly arithmetic helper. */

/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
}


/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
}


/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss (scalar, low 32 bits only) */
FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd (scalar, low 64 bits only) */
FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
}
4048
4049
/* 0x0f 0x52/0x53: reciprocal square root / reciprocal approximations -
   not yet implemented (stubs). */
/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */
4063
4064
/** Opcode 0x0f 0x54 - andps Vps, Wps */
FNIEMOP_DEF(iemOp_andps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Bitwise AND is type-agnostic, so the integer pand helper is reused.
       NOTE(review): andps is an SSE1 instruction but this routes through the
       SSE2 common worker - confirm the CPUID gating is intentional. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}


/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Bitwise AND is type-agnostic, so the integer pand helper is reused. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}


/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */
4083
4084
/** Opcode 0x0f 0x55 - andnps Vps, Wps */
FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Bitwise AND-NOT is type-agnostic, so the integer pandn helper is reused.
       NOTE(review): andnps is an SSE1 instruction but this routes through the
       SSE2 common worker - confirm the CPUID gating is intentional. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}


/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Bitwise AND-NOT is type-agnostic, so the integer pandn helper is reused. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}


/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */
4103
4104
/** Opcode 0x0f 0x56 - orps Vps, Wps */
FNIEMOP_DEF(iemOp_orps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Bitwise OR is type-agnostic, so the integer por helper is reused.
       NOTE(review): orps is an SSE1 instruction but this routes through the
       SSE2 common worker - confirm the CPUID gating is intentional. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Bitwise OR is type-agnostic, so the integer por helper is reused. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */
4123
4124
/** Opcode 0x0f 0x57 - xorps Vps, Wps */
FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Bitwise XOR is type-agnostic, so the integer pxor helper is reused.
       NOTE(review): xorps is an SSE1 instruction but this routes through the
       SSE2 common worker - confirm the CPUID gating is intentional. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}


/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Bitwise XOR is type-agnostic, so the integer pxor helper is reused. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}


/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */
4143
/* 0x0f 0x58: packed/scalar FP addition - thin decode wrappers dispatching to
   the common SSE/SSE2 FP workers with the matching assembly helper. */

/** Opcode 0x0f 0x58 - addps Vps, Wps */
FNIEMOP_DEF(iemOp_addps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
}


/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
}


/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss (scalar, low 32 bits only) */
FNIEMOP_DEF(iemOp_addss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd (scalar, low 64 bits only) */
FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
}
4174
4175
/* 0x0f 0x59: packed/scalar FP multiplication - thin decode wrappers
   dispatching to the common SSE/SSE2 FP workers. */

/** Opcode 0x0f 0x59 - mulps Vps, Wps */
FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
}


/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
}


/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss (scalar, low 32 bits only) */
FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd (scalar, low 64 bits only) */
FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
}
4206
4207
/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps
 * @note Not implemented yet (FNIEMOP_STUB). */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);


/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
{
    /* Packed double -> packed single conversion, full 128-bit operands (SSE2). */
    IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
}


/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
{
    /* Scalar single -> scalar double; only the low 32-bit source lane is read. */
    IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
{
    /* Scalar double -> scalar single; only the low 64-bit source lane is read. */
    IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
}
4234
4235
/* The 0x5b conversion group is not implemented yet (FNIEMOP_STUB). */
/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */


/** Opcode 0x0f 0x5c - subps Vps, Wps */
FNIEMOP_DEF(iemOp_subps_Vps_Wps)
{
    /* Packed single-precision subtract, full 128-bit operands. */
    IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
}


/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
}


/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
FNIEMOP_DEF(iemOp_subss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
}
4275
4276
/** Opcode 0x0f 0x5d - minps Vps, Wps */
FNIEMOP_DEF(iemOp_minps_Vps_Wps)
{
    /* Packed single-precision minimum, full 128-bit operands. */
    IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
}


/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
}


/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
FNIEMOP_DEF(iemOp_minss_Vss_Wss)
{
    /* Scalar variant: only the low 32-bit source lane is read. */
    IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
}
4307
4308
/** Opcode 0x0f 0x5e - divps Vps, Wps */
FNIEMOP_DEF(iemOp_divps_Vps_Wps)
{
    /* Packed single-precision divide, full 128-bit operands. */
    IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
}


/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
}


/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
FNIEMOP_DEF(iemOp_divss_Vss_Wss)
{
    /* Scalar variant: only the low 32-bit source lane is read. */
    IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
}
4339
4340
/** Opcode 0x0f 0x5f - maxps Vps, Wps */
FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
{
    /* Packed single-precision maximum, full 128-bit operands. */
    IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
}


/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
}


/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
{
    /* Scalar variant: only the low 32-bit source lane is read. */
    IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
}


/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
}
4371
4372
/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    /* Interleave the low halves of the two operands (byte granularity), MMX. */
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
}


/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
}


/* Opcode 0xf3 0x0f 0x60 - invalid */


/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
}


/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
}


/* Opcode 0xf3 0x0f 0x61 - invalid */


/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
}


/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
}
4426
4427
4428/* Opcode 0xf3 0x0f 0x62 - invalid */
4429
4430
4431
/** Opcode 0x0f 0x63 - packsswb Pq, Qq
 * @note NOTE(review): the function name says Qq while the mnemonic below
 *       declares Qd; the operand is an mm/m64 qword — confirm which form is
 *       intended (cf. the Qd/Qq note on opcode 0x68). */
FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
{
    /* Pack signed words to signed bytes with saturation, MMX. */
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
}


/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
}
4446
4447
4448/* Opcode 0xf3 0x0f 0x63 - invalid */
4449
4450
/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
{
    /* Signed greater-than compare, byte granularity, MMX. */
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
}


/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
}


/* Opcode 0xf3 0x0f 0x64 - invalid */


/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
{
    /* Signed greater-than compare, word granularity, MMX. */
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
}


/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
}


/* Opcode 0xf3 0x0f 0x65 - invalid */


/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
{
    /* Signed greater-than compare, dword granularity, MMX. */
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
}


/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
}
4503
4504
4505/* Opcode 0xf3 0x0f 0x66 - invalid */
4506
4507
/** Opcode 0x0f 0x67 - packuswb Pq, Qq
 * @note NOTE(review): name says Qq but the mnemonic declares Qd; the operand
 *       is an mm/m64 qword — confirm which form is intended. */
FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
{
    /* Pack signed words to unsigned bytes with saturation, MMX. */
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
}


/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
}
4522
4523
4524/* Opcode 0xf3 0x0f 0x67 - invalid */
4525
4526
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
{
    /* Interleave the high halves of the two operands (byte granularity), MMX. */
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}


/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
}


/* Opcode 0xf3 0x0f 0x68 - invalid */


/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
}


/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);

}


/* Opcode 0xf3 0x0f 0x69 - invalid */


/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
}


/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
}
4589
4590
4591/* Opcode 0xf3 0x0f 0x6a - invalid */
4592
4593
/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
{
    /* Pack signed dwords to signed words with saturation, MMX. */
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
}


/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
}
4608
4609
4610/* Opcode 0xf3 0x0f 0x6b - invalid */
4611
4612
4613/* Opcode 0x0f 0x6c - invalid */
4614
4615
/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    /* Interleave the low qwords of the two 128-bit operands (SSE2 only; no MMX form). */
    IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
}


/* Opcode 0xf3 0x0f 0x6c - invalid */
/* Opcode 0xf2 0x0f 0x6c - invalid */


/* Opcode 0x0f 0x6d - invalid */


/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
{
    /* Interleave the high qwords of the two 128-bit operands (SSE2 only; no MMX form). */
    IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
}
4637
4638
4639/* Opcode 0xf3 0x0f 0x6d - invalid */
4640
4641
/** Opcode 0x0f 0x6e - movd/movq Pd/Pq, Ed/Eq; the decoder splits on REX.W. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg64: copy the full 64-bit GPR into the mmX register. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64]: load a qword from memory into the mmX register. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg: zero-extend the 32-bit GPR into the 64-bit mmX register. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem]: load a dword and zero-extend it into the mmX register. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4749
/** Opcode 0x66 0x0f 0x6e - movd/movq Vd/Vq, Ed/Eq; the decoder splits on REX.W. */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64: low qword <- GPR, upper qword zeroed (ZX_U128 store). */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64]: low qword <- memory, upper qword zeroed. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32: low dword <- GPR, remaining 96 bits zeroed. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32]: low dword <- memory, remaining 96 bits zeroed. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4853
4854/* Opcode 0xf3 0x0f 0x6e - invalid */
4855
4856
4857/**
4858 * @opcode 0x6f
4859 * @oppfx none
4860 * @opcpuid mmx
4861 * @opgroup og_mmx_datamove
4862 * @opxcpttype 5
4863 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4864 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4865 */
4866FNIEMOP_DEF(iemOp_movq_Pq_Qq)
4867{
4868 IEMOP_MNEMONIC2(RM, MOVD, movd, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4869 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4870 if (IEM_IS_MODRM_REG_MODE(bRm))
4871 {
4872 /*
4873 * Register, register.
4874 */
4875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4876 IEM_MC_BEGIN(0, 1);
4877 IEM_MC_LOCAL(uint64_t, u64Tmp);
4878
4879 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4880 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4881
4882 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
4883 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4884 IEM_MC_FPU_TO_MMX_MODE();
4885
4886 IEM_MC_ADVANCE_RIP();
4887 IEM_MC_END();
4888 }
4889 else
4890 {
4891 /*
4892 * Register, memory.
4893 */
4894 IEM_MC_BEGIN(0, 2);
4895 IEM_MC_LOCAL(uint64_t, u64Tmp);
4896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4897
4898 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4900 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4901 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4902
4903 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4904 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4905 IEM_MC_FPU_TO_MMX_MODE();
4906
4907 IEM_MC_ADVANCE_RIP();
4908 IEM_MC_END();
4909 }
4910 return VINF_SUCCESS;
4911}
4912
4913/**
4914 * @opcode 0x6f
4915 * @oppfx 0x66
4916 * @opcpuid sse2
4917 * @opgroup og_sse2_simdint_datamove
4918 * @opxcpttype 1
4919 * @optest op1=1 op2=2 -> op1=2
4920 * @optest op1=0 op2=-42 -> op1=-42
4921 */
4922FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
4923{
4924 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4926 if (IEM_IS_MODRM_REG_MODE(bRm))
4927 {
4928 /*
4929 * Register, register.
4930 */
4931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4932 IEM_MC_BEGIN(0, 0);
4933
4934 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4935 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4936
4937 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4938 IEM_GET_MODRM_RM(pVCpu, bRm));
4939 IEM_MC_ADVANCE_RIP();
4940 IEM_MC_END();
4941 }
4942 else
4943 {
4944 /*
4945 * Register, memory.
4946 */
4947 IEM_MC_BEGIN(0, 2);
4948 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4950
4951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4953 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4954 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4955
4956 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4957 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4958
4959 IEM_MC_ADVANCE_RIP();
4960 IEM_MC_END();
4961 }
4962 return VINF_SUCCESS;
4963}
4964
4965/**
4966 * @opcode 0x6f
4967 * @oppfx 0xf3
4968 * @opcpuid sse2
4969 * @opgroup og_sse2_simdint_datamove
4970 * @opxcpttype 4UA
4971 * @optest op1=1 op2=2 -> op1=2
4972 * @optest op1=0 op2=-42 -> op1=-42
4973 */
4974FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
4975{
4976 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4977 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4978 if (IEM_IS_MODRM_REG_MODE(bRm))
4979 {
4980 /*
4981 * Register, register.
4982 */
4983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4984 IEM_MC_BEGIN(0, 0);
4985 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4986 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4987 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4988 IEM_GET_MODRM_RM(pVCpu, bRm));
4989 IEM_MC_ADVANCE_RIP();
4990 IEM_MC_END();
4991 }
4992 else
4993 {
4994 /*
4995 * Register, memory.
4996 */
4997 IEM_MC_BEGIN(0, 2);
4998 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5000
5001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5003 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5004 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5005 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5006 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
5007
5008 IEM_MC_ADVANCE_RIP();
5009 IEM_MC_END();
5010 }
5011 return VINF_SUCCESS;
5012}
5013
5014
/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
        /* PSHUFW requires SSE or the AMD MMX extensions, not plain MMX. */
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* Note: the imm8 is fetched after the effective address, matching
           its position in the instruction encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5071
5072
5073/**
5074 * Common worker for SSE2 instructions on the forms:
5075 * pshufd xmm1, xmm2/mem128, imm8
5076 * pshufhw xmm1, xmm2/mem128, imm8
5077 * pshuflw xmm1, xmm2/mem128, imm8
5078 *
5079 * Proper alignment of the 128-bit operand is enforced.
5080 * Exceptions type 4. SSE2 cpuid checks.
5081 */
5082FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
5083{
5084 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5085 if (IEM_IS_MODRM_REG_MODE(bRm))
5086 {
5087 /*
5088 * Register, register.
5089 */
5090 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
5091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5092
5093 IEM_MC_BEGIN(3, 0);
5094 IEM_MC_ARG(PRTUINT128U, puDst, 0);
5095 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5096 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
5097 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5098 IEM_MC_PREPARE_SSE_USAGE();
5099 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5100 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5101 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
5102 IEM_MC_ADVANCE_RIP();
5103 IEM_MC_END();
5104 }
5105 else
5106 {
5107 /*
5108 * Register, memory.
5109 */
5110 IEM_MC_BEGIN(3, 2);
5111 IEM_MC_ARG(PRTUINT128U, puDst, 0);
5112 IEM_MC_LOCAL(RTUINT128U, uSrc);
5113 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
5114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5115
5116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5117 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
5118 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
5119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5120 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5121
5122 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5123 IEM_MC_PREPARE_SSE_USAGE();
5124 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5125 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
5126
5127 IEM_MC_ADVANCE_RIP();
5128 IEM_MC_END();
5129 }
5130 return VINF_SUCCESS;
5131}
5132
5133
/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    /* Shuffle all four dwords per the imm8 control. */
    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
}


/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    /* Shuffle the high four words per the imm8 control. */
    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
}


/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    /* Shuffle the low four words per the imm8 control. */
    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
}
5156
5157
5158/**
5159 * Common worker for MMX instructions of the form:
5160 * psrlw mm, imm8
5161 * psraw mm, imm8
5162 * psllw mm, imm8
5163 * psrld mm, imm8
5164 * psrad mm, imm8
5165 * pslld mm, imm8
5166 * psrlq mm, imm8
5167 * psllq mm, imm8
5168 *
5169 */
5170FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
5171{
5172 if (IEM_IS_MODRM_REG_MODE(bRm))
5173 {
5174 /*
5175 * Register, immediate.
5176 */
5177 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5179
5180 IEM_MC_BEGIN(2, 0);
5181 IEM_MC_ARG(uint64_t *, pDst, 0);
5182 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
5183 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5184 IEM_MC_PREPARE_FPU_USAGE();
5185 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
5186 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
5187 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
5188 IEM_MC_FPU_TO_MMX_MODE();
5189 IEM_MC_ADVANCE_RIP();
5190 IEM_MC_END();
5191 }
5192 else
5193 {
5194 /*
5195 * Register, memory not supported.
5196 */
5197 /// @todo Caller already enforced register mode?!
5198 }
5199 return VINF_SUCCESS;
5200}
5201
5202
5203/**
5204 * Common worker for SSE2 instructions of the form:
5205 * psrlw xmm, imm8
5206 * psraw xmm, imm8
5207 * psllw xmm, imm8
5208 * psrld xmm, imm8
5209 * psrad xmm, imm8
5210 * pslld xmm, imm8
5211 * psrlq xmm, imm8
5212 * psllq xmm, imm8
5213 *
5214 */
5215FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
5216{
5217 if (IEM_IS_MODRM_REG_MODE(bRm))
5218 {
5219 /*
5220 * Register, immediate.
5221 */
5222 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5224
5225 IEM_MC_BEGIN(2, 0);
5226 IEM_MC_ARG(PRTUINT128U, pDst, 0);
5227 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
5228 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5229 IEM_MC_PREPARE_SSE_USAGE();
5230 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5231 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
5232 IEM_MC_ADVANCE_RIP();
5233 IEM_MC_END();
5234 }
5235 else
5236 {
5237 /*
5238 * Register, memory.
5239 */
5240 /// @todo Caller already enforced register mode?!
5241 }
5242 return VINF_SUCCESS;
5243}
5244
5245
/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
{
    /* Logical right shift of words, MMX register form. */
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
{
    /* Logical right shift of words, SSE2 register form. */
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
}


/** Opcode 0x0f 0x71 11/4. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
{
    /* Arithmetic right shift of words, MMX register form. */
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
{
    /* Arithmetic right shift of words, SSE2 register form. */
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
}


/** Opcode 0x0f 0x71 11/6. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
{
    /* Left shift of words, MMX register form. */
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
}


/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
{
    /* Left shift of words, SSE2 register form. */
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
}
5292
5293
/**
 * Group 12 jump table for register variant.
 *
 * Indexed by ModRM.reg (/0../7) times four, plus the SIMD prefix slot
 * (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2), matching pVCpu->iem.s.idxPrefix.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
5309
5310
5311/** Opcode 0x0f 0x71. */
5312FNIEMOP_DEF(iemOp_Grp12)
5313{
5314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5315 if (IEM_IS_MODRM_REG_MODE(bRm))
5316 /* register, register */
5317 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5318 + pVCpu->iem.s.idxPrefix], bRm);
5319 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
5320}
5321
5322
/** Opcode 0x0f 0x72 11/2 - psrld Nq, Ib (MMX: shift dwords right, logical). */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/2 - psrld Ux, Ib (SSE2: shift dwords right, logical). */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
}


/** Opcode 0x0f 0x72 11/4 - psrad Nq, Ib (MMX: shift dwords right, arithmetic). */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}


/** Opcode 0x66 0x0f 0x72 11/4 - psrad Ux, Ib (SSE2: shift dwords right, arithmetic). */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
}


/** Opcode 0x0f 0x72 11/6 - pslld Nq, Ib (MMX: shift dwords left, logical). */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}

/** Opcode 0x66 0x0f 0x72 11/6 - pslld Ux, Ib (SSE2: shift dwords left, logical). */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
}
5368
5369
/**
 * Group 13 jump table for register variant.
 *
 * Indexed by ModRM.reg (/0../7) times four, plus the SIMD prefix slot
 * (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2), matching pVCpu->iem.s.idxPrefix.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
5385
5386/** Opcode 0x0f 0x72. */
5387FNIEMOP_DEF(iemOp_Grp13)
5388{
5389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5390 if (IEM_IS_MODRM_REG_MODE(bRm))
5391 /* register, register */
5392 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5393 + pVCpu->iem.s.idxPrefix], bRm);
5394 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
5395}
5396
5397
/** Opcode 0x0f 0x73 11/2 - psrlq Nq, Ib (MMX: shift qword right, logical). */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/2 - psrlq Ux, Ib (SSE2: shift qwords right, logical). */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
}


/** Opcode 0x66 0x0f 0x73 11/3 - psrldq Ux, Ib (SSE2: byte-wise shift of the whole xmm, right; 0x66 prefix only). */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
}


/** Opcode 0x0f 0x73 11/6 - psllq Nq, Ib (MMX: shift qword left, logical). */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/6 - psllq Ux, Ib (SSE2: shift qwords left, logical). */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
}


/** Opcode 0x66 0x0f 0x73 11/7 - pslldq Ux, Ib (SSE2: byte-wise shift of the whole xmm, left; 0x66 prefix only). */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
}
5444
/**
 * Group 14 jump table for register variant.
 *
 * Indexed by ModRM.reg (/0../7) times four, plus the SIMD prefix slot
 * (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2), matching pVCpu->iem.s.idxPrefix.
 * Note that /3 (psrldq) and /7 (pslldq) exist only with the 0x66 prefix.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
5460
5461
5462/** Opcode 0x0f 0x73. */
5463FNIEMOP_DEF(iemOp_Grp14)
5464{
5465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5466 if (IEM_IS_MODRM_REG_MODE(bRm))
5467 /* register, register */
5468 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5469 + pVCpu->iem.s.idxPrefix], bRm);
5470 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
5471}
5472
5473
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq (MMX: byte-wise compare-for-equality, all-ones mask per equal byte). */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}


/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx (SSE2: byte-wise compare-for-equality). */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}


/* Opcode 0xf3 0x0f 0x74 - invalid */
/* Opcode 0xf2 0x0f 0x74 - invalid */


/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq (MMX: word-wise compare-for-equality). */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}


/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx (SSE2: word-wise compare-for-equality). */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
}


/* Opcode 0xf3 0x0f 0x75 - invalid */
/* Opcode 0xf2 0x0f 0x75 - invalid */


/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq (MMX: dword-wise compare-for-equality). */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
}


/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx (SSE2: dword-wise compare-for-equality). */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
}
5528
5529
5530/* Opcode 0xf3 0x0f 0x76 - invalid */
5531/* Opcode 0xf2 0x0f 0x76 - invalid */
5532
5533
/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_DEF(iemOp_emms)
{
    IEMOP_MNEMONIC(emms, "emms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();         /* leave MMX mode: marks the x87 register stack empty again */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5549
5550/* Opcode 0x66 0x0f 0x77 - invalid */
5551/* Opcode 0xf3 0x0f 0x77 - invalid */
5552/* Opcode 0xf2 0x0f 0x77 - invalid */
5553
/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    /* Reads the VMCS field whose encoding is in Gy into Ey (register or memory).
       Only valid in VMX operation; the HLP macros below raise the appropriate
       errors/exceptions otherwise. */
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    /* Operand size is fixed at 64-bit in long mode, 32-bit otherwise. */
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));  /* field encoding from /reg */
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));    /* destination is the r/m register */
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
#endif
5628
5629/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
5630FNIEMOP_STUB(iemOp_AmdGrp17);
5631/* Opcode 0xf3 0x0f 0x78 - invalid */
5632/* Opcode 0xf2 0x0f 0x78 - invalid */
5633
/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    /* Writes the value in Ey (register or memory) to the VMCS field whose
       encoding is in Gy.  Only valid in VMX operation. */
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    /* Operand size is fixed at 64-bit in long mode, 32-bit otherwise. */
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));   /* value from r/m */
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));  /* field encoding from /reg */
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
#endif
5708/* Opcode 0x66 0x0f 0x79 - invalid */
5709/* Opcode 0xf3 0x0f 0x79 - invalid */
5710/* Opcode 0xf2 0x0f 0x79 - invalid */
5711
5712/* Opcode 0x0f 0x7a - invalid */
5713/* Opcode 0x66 0x0f 0x7a - invalid */
5714/* Opcode 0xf3 0x0f 0x7a - invalid */
5715/* Opcode 0xf2 0x0f 0x7a - invalid */
5716
5717/* Opcode 0x0f 0x7b - invalid */
5718/* Opcode 0x66 0x0f 0x7b - invalid */
5719/* Opcode 0xf3 0x0f 0x7b - invalid */
5720/* Opcode 0xf2 0x0f 0x7b - invalid */
5721
5722/* Opcode 0x0f 0x7c - invalid */
5723
5724
/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd (SSE3: horizontal add of packed doubles). */
FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
}


/* Opcode 0xf3 0x0f 0x7c - invalid */


/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps (SSE3: horizontal add of packed singles). */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}


/* Opcode 0x0f 0x7d - invalid */


/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd (SSE3: horizontal subtract of packed doubles). */
FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
}


/* Opcode 0xf3 0x0f 0x7d - invalid */


/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps (SSE3: horizontal subtract of packed singles). */
FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
}
5764
5765
/** Opcode 0x0f 0x7e - movd_q Ey, Pd
 * Stores the low dword (or, with REX.W, the full qword) of an MMX register
 * to a general register or memory. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64 <- MMX register (full qword) */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64] <- MMX register (full qword) */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32 <- MMX register (low dword) */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32] <- MMX register (low dword) */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;

}
5875
5876
/** Opcode 0x66 0x0f 0x7e - movd/movq Ey, Vy
 * Stores the low dword (or, with REX.W, the low qword) of an XMM register
 * to a general register or memory. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64 <- XMM (low qword) */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64] <- XMM (low qword) */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32 <- XMM (low dword) */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32] <- XMM (low dword) */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;

}
5981
/**
 * @opcode      0x7e
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  none
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * movq Vq, Wq: loads a qword from an XMM register or memory into the low
 * half of the destination XMM register, zero-extending to 128 bits.
 */
FNIEMOP_DEF(iemOp_movq_Vq_Wq)
{
    IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc); /* high qword zeroed */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory (load: XMM <- [mem64], zero-extended to 128 bits).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6035
6036/* Opcode 0xf2 0x0f 0x7e - invalid */
6037
6038
/** Opcode 0x0f 0x7f - movq Qq, Pq
 * Stores an MMX register to another MMX register or to memory. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
        IEM_MC_FPU_TO_MMX_MODE();       /* MMX ops mark the FPU as being in MMX mode */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, Register (store: [mem64] <- MMX register).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6085
/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx
 * Stores an XMM register to another XMM register or to 16-byte aligned memory. */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register (store: [mem128] <- XMM; #GP on unaligned address).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp); /* alignment-checked store */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6127
/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx
 * Stores an XMM register to another XMM register or to memory; unlike movdqa
 * there is no alignment restriction on the memory operand. */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register (store: [mem128] <- XMM; no alignment check).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6169
6170/* Opcode 0xf2 0x0f 0x7f - invalid */
6171
6172
6173
/** Opcode 0x0f 0x80 - jo Jv (near jump if OF=1). */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo  Jv");
    IEMOP_HLP_MIN_386();                    /* 0x0f 0x8x Jcc forms were introduced with the 386 */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();      /* in long mode the operand size defaults to 64-bit (disp is still 32-bit) */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6208
6209
/** Opcode 0x0f 0x81 - jno Jv (near jump if OF=0; branch sense inverted below). */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {     /* OF set -> fall through */
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {                       /* OF clear -> take the jump */
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6244
6245
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv (near jump if CF=1). */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6280
6281
6282/** Opcode 0x0f 0x83. */
/**
 * 'jnc/jnb/jae Jv' - near conditional jump taken when CF is clear.  386+
 * only.  Inverted test: the IEM_MC_IF checks CF *set* and advances RIP;
 * the jump is emitted in the ELSE arm.
 */
6283FNIEMOP_DEF(iemOp_jnc_Jv)
6284{
6285    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
6286    IEMOP_HLP_MIN_386();
6287    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6288    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6289    {
6290        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6291        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6292
6293        IEM_MC_BEGIN(0, 0);
6294        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6295            IEM_MC_ADVANCE_RIP();
6296        } IEM_MC_ELSE() {
6297            IEM_MC_REL_JMP_S16(i16Imm);
6298        } IEM_MC_ENDIF();
6299        IEM_MC_END();
6300    }
6301    else
6302    {
6303        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6304        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6305
6306        IEM_MC_BEGIN(0, 0);
6307        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6308            IEM_MC_ADVANCE_RIP();
6309        } IEM_MC_ELSE() {
6310            IEM_MC_REL_JMP_S32(i32Imm);
6311        } IEM_MC_ENDIF();
6312        IEM_MC_END();
6313    }
6314    return VINF_SUCCESS;
6315}
6316
6317
6318/** Opcode 0x0f 0x84. */
/**
 * 'je/jz Jv' - near conditional jump with a signed 16-/32-bit displacement,
 * taken when ZF is set.  386+ only; operand size default is adjusted via
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE.
 */
6319FNIEMOP_DEF(iemOp_je_Jv)
6320{
6321    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
6322    IEMOP_HLP_MIN_386();
6323    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6324    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6325    {
6326        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6327        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6328
6329        IEM_MC_BEGIN(0, 0);
6330        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6331            IEM_MC_REL_JMP_S16(i16Imm);
6332        } IEM_MC_ELSE() {
6333            IEM_MC_ADVANCE_RIP();
6334        } IEM_MC_ENDIF();
6335        IEM_MC_END();
6336    }
6337    else
6338    {
6339        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6340        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6341
6342        IEM_MC_BEGIN(0, 0);
6343        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6344            IEM_MC_REL_JMP_S32(i32Imm);
6345        } IEM_MC_ELSE() {
6346            IEM_MC_ADVANCE_RIP();
6347        } IEM_MC_ENDIF();
6348        IEM_MC_END();
6349    }
6350    return VINF_SUCCESS;
6351}
6352
6353
6354/** Opcode 0x0f 0x85. */
/**
 * 'jne/jnz Jv' - near conditional jump taken when ZF is clear.  386+ only.
 * Inverted test: the IEM_MC_IF checks ZF *set* and advances RIP; the jump
 * is emitted in the ELSE arm.
 */
6355FNIEMOP_DEF(iemOp_jne_Jv)
6356{
6357    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
6358    IEMOP_HLP_MIN_386();
6359    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6360    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6361    {
6362        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6363        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6364
6365        IEM_MC_BEGIN(0, 0);
6366        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6367            IEM_MC_ADVANCE_RIP();
6368        } IEM_MC_ELSE() {
6369            IEM_MC_REL_JMP_S16(i16Imm);
6370        } IEM_MC_ENDIF();
6371        IEM_MC_END();
6372    }
6373    else
6374    {
6375        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6376        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6377
6378        IEM_MC_BEGIN(0, 0);
6379        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6380            IEM_MC_ADVANCE_RIP();
6381        } IEM_MC_ELSE() {
6382            IEM_MC_REL_JMP_S32(i32Imm);
6383        } IEM_MC_ENDIF();
6384        IEM_MC_END();
6385    }
6386    return VINF_SUCCESS;
6387}
6388
6389
6390/** Opcode 0x0f 0x86. */
/**
 * 'jbe/jna Jv' - near conditional jump with a signed 16-/32-bit
 * displacement, taken when CF or ZF (or both) is set, i.e. unsigned
 * below-or-equal.  386+ only.
 */
6391FNIEMOP_DEF(iemOp_jbe_Jv)
6392{
6393    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
6394    IEMOP_HLP_MIN_386();
6395    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6396    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6397    {
6398        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6399        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6400
6401        IEM_MC_BEGIN(0, 0);
6402        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6403            IEM_MC_REL_JMP_S16(i16Imm);
6404        } IEM_MC_ELSE() {
6405            IEM_MC_ADVANCE_RIP();
6406        } IEM_MC_ENDIF();
6407        IEM_MC_END();
6408    }
6409    else
6410    {
6411        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6412        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6413
6414        IEM_MC_BEGIN(0, 0);
6415        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6416            IEM_MC_REL_JMP_S32(i32Imm);
6417        } IEM_MC_ELSE() {
6418            IEM_MC_ADVANCE_RIP();
6419        } IEM_MC_ENDIF();
6420        IEM_MC_END();
6421    }
6422    return VINF_SUCCESS;
6423}
6424
6425
6426/** Opcode 0x0f 0x87. */
/**
 * 'jnbe/ja Jv' - near conditional jump taken when both CF and ZF are clear
 * (unsigned above).  386+ only.  Inverted test: the IEM_MC_IF checks
 * CF|ZF *set* and advances RIP; the jump is emitted in the ELSE arm.
 */
6427FNIEMOP_DEF(iemOp_jnbe_Jv)
6428{
6429    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
6430    IEMOP_HLP_MIN_386();
6431    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6432    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6433    {
6434        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6435        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6436
6437        IEM_MC_BEGIN(0, 0);
6438        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6439            IEM_MC_ADVANCE_RIP();
6440        } IEM_MC_ELSE() {
6441            IEM_MC_REL_JMP_S16(i16Imm);
6442        } IEM_MC_ENDIF();
6443        IEM_MC_END();
6444    }
6445    else
6446    {
6447        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6448        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6449
6450        IEM_MC_BEGIN(0, 0);
6451        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6452            IEM_MC_ADVANCE_RIP();
6453        } IEM_MC_ELSE() {
6454            IEM_MC_REL_JMP_S32(i32Imm);
6455        } IEM_MC_ENDIF();
6456        IEM_MC_END();
6457    }
6458    return VINF_SUCCESS;
6459}
6460
6461
6462/** Opcode 0x0f 0x88. */
/**
 * 'js Jv' - near conditional jump with a signed 16-/32-bit displacement,
 * taken when SF is set.  386+ only; operand size default is adjusted via
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE.
 */
6463FNIEMOP_DEF(iemOp_js_Jv)
6464{
6465    IEMOP_MNEMONIC(js_Jv, "js Jv");
6466    IEMOP_HLP_MIN_386();
6467    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6468    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6469    {
6470        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6471        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6472
6473        IEM_MC_BEGIN(0, 0);
6474        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6475            IEM_MC_REL_JMP_S16(i16Imm);
6476        } IEM_MC_ELSE() {
6477            IEM_MC_ADVANCE_RIP();
6478        } IEM_MC_ENDIF();
6479        IEM_MC_END();
6480    }
6481    else
6482    {
6483        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6484        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6485
6486        IEM_MC_BEGIN(0, 0);
6487        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6488            IEM_MC_REL_JMP_S32(i32Imm);
6489        } IEM_MC_ELSE() {
6490            IEM_MC_ADVANCE_RIP();
6491        } IEM_MC_ENDIF();
6492        IEM_MC_END();
6493    }
6494    return VINF_SUCCESS;
6495}
6496
6497
6498/** Opcode 0x0f 0x89. */
/**
 * 'jns Jv' - near conditional jump taken when SF is clear.  386+ only.
 * Inverted test: the IEM_MC_IF checks SF *set* and advances RIP; the jump
 * is emitted in the ELSE arm.
 */
6499FNIEMOP_DEF(iemOp_jns_Jv)
6500{
6501    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
6502    IEMOP_HLP_MIN_386();
6503    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6504    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6505    {
6506        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6507        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6508
6509        IEM_MC_BEGIN(0, 0);
6510        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6511            IEM_MC_ADVANCE_RIP();
6512        } IEM_MC_ELSE() {
6513            IEM_MC_REL_JMP_S16(i16Imm);
6514        } IEM_MC_ENDIF();
6515        IEM_MC_END();
6516    }
6517    else
6518    {
6519        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6520        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6521
6522        IEM_MC_BEGIN(0, 0);
6523        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6524            IEM_MC_ADVANCE_RIP();
6525        } IEM_MC_ELSE() {
6526            IEM_MC_REL_JMP_S32(i32Imm);
6527        } IEM_MC_ENDIF();
6528        IEM_MC_END();
6529    }
6530    return VINF_SUCCESS;
6531}
6532
6533
6534/** Opcode 0x0f 0x8a. */
/**
 * 'jp Jv' - near conditional jump with a signed 16-/32-bit displacement,
 * taken when PF is set.  386+ only; operand size default is adjusted via
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE.
 */
6535FNIEMOP_DEF(iemOp_jp_Jv)
6536{
6537    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
6538    IEMOP_HLP_MIN_386();
6539    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6540    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6541    {
6542        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6543        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6544
6545        IEM_MC_BEGIN(0, 0);
6546        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6547            IEM_MC_REL_JMP_S16(i16Imm);
6548        } IEM_MC_ELSE() {
6549            IEM_MC_ADVANCE_RIP();
6550        } IEM_MC_ENDIF();
6551        IEM_MC_END();
6552    }
6553    else
6554    {
6555        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6556        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6557
6558        IEM_MC_BEGIN(0, 0);
6559        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6560            IEM_MC_REL_JMP_S32(i32Imm);
6561        } IEM_MC_ELSE() {
6562            IEM_MC_ADVANCE_RIP();
6563        } IEM_MC_ENDIF();
6564        IEM_MC_END();
6565    }
6566    return VINF_SUCCESS;
6567}
6568
6569
6570/** Opcode 0x0f 0x8b. */
/**
 * 'jnp Jv' - near conditional jump taken when PF is clear.  386+ only.
 * Inverted test: the IEM_MC_IF checks PF *set* and advances RIP; the jump
 * is emitted in the ELSE arm.
 */
6571FNIEMOP_DEF(iemOp_jnp_Jv)
6572{
6573    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
6574    IEMOP_HLP_MIN_386();
6575    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6576    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6577    {
6578        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6579        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6580
6581        IEM_MC_BEGIN(0, 0);
6582        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6583            IEM_MC_ADVANCE_RIP();
6584        } IEM_MC_ELSE() {
6585            IEM_MC_REL_JMP_S16(i16Imm);
6586        } IEM_MC_ENDIF();
6587        IEM_MC_END();
6588    }
6589    else
6590    {
6591        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6592        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6593
6594        IEM_MC_BEGIN(0, 0);
6595        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6596            IEM_MC_ADVANCE_RIP();
6597        } IEM_MC_ELSE() {
6598            IEM_MC_REL_JMP_S32(i32Imm);
6599        } IEM_MC_ENDIF();
6600        IEM_MC_END();
6601    }
6602    return VINF_SUCCESS;
6603}
6604
6605
6606/** Opcode 0x0f 0x8c. */
/**
 * 'jl/jnge Jv' - near conditional jump with a signed 16-/32-bit
 * displacement, taken when SF != OF (signed less).  386+ only.
 */
6607FNIEMOP_DEF(iemOp_jl_Jv)
6608{
6609    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
6610    IEMOP_HLP_MIN_386();
6611    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6612    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6613    {
6614        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6615        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6616
6617        IEM_MC_BEGIN(0, 0);
6618        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6619            IEM_MC_REL_JMP_S16(i16Imm);
6620        } IEM_MC_ELSE() {
6621            IEM_MC_ADVANCE_RIP();
6622        } IEM_MC_ENDIF();
6623        IEM_MC_END();
6624    }
6625    else
6626    {
6627        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6628        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6629
6630        IEM_MC_BEGIN(0, 0);
6631        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6632            IEM_MC_REL_JMP_S32(i32Imm);
6633        } IEM_MC_ELSE() {
6634            IEM_MC_ADVANCE_RIP();
6635        } IEM_MC_ENDIF();
6636        IEM_MC_END();
6637    }
6638    return VINF_SUCCESS;
6639}
6640
6641
6642/** Opcode 0x0f 0x8d. */
/**
 * 'jnl/jge Jv' - near conditional jump taken when SF == OF (signed
 * greater-or-equal).  386+ only.  Inverted test: the IEM_MC_IF checks
 * SF != OF and advances RIP; the jump is emitted in the ELSE arm.
 */
6643FNIEMOP_DEF(iemOp_jnl_Jv)
6644{
6645    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
6646    IEMOP_HLP_MIN_386();
6647    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6648    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6649    {
6650        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6651        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6652
6653        IEM_MC_BEGIN(0, 0);
6654        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6655            IEM_MC_ADVANCE_RIP();
6656        } IEM_MC_ELSE() {
6657            IEM_MC_REL_JMP_S16(i16Imm);
6658        } IEM_MC_ENDIF();
6659        IEM_MC_END();
6660    }
6661    else
6662    {
6663        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6664        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6665
6666        IEM_MC_BEGIN(0, 0);
6667        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6668            IEM_MC_ADVANCE_RIP();
6669        } IEM_MC_ELSE() {
6670            IEM_MC_REL_JMP_S32(i32Imm);
6671        } IEM_MC_ENDIF();
6672        IEM_MC_END();
6673    }
6674    return VINF_SUCCESS;
6675}
6676
6677
6678/** Opcode 0x0f 0x8e. */
/**
 * 'jle/jng Jv' - near conditional jump with a signed 16-/32-bit
 * displacement, taken when ZF is set or SF != OF (signed less-or-equal).
 * 386+ only.
 */
6679FNIEMOP_DEF(iemOp_jle_Jv)
6680{
6681    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
6682    IEMOP_HLP_MIN_386();
6683    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6684    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6685    {
6686        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6687        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6688
6689        IEM_MC_BEGIN(0, 0);
6690        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6691            IEM_MC_REL_JMP_S16(i16Imm);
6692        } IEM_MC_ELSE() {
6693            IEM_MC_ADVANCE_RIP();
6694        } IEM_MC_ENDIF();
6695        IEM_MC_END();
6696    }
6697    else
6698    {
6699        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6700        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6701
6702        IEM_MC_BEGIN(0, 0);
6703        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6704            IEM_MC_REL_JMP_S32(i32Imm);
6705        } IEM_MC_ELSE() {
6706            IEM_MC_ADVANCE_RIP();
6707        } IEM_MC_ENDIF();
6708        IEM_MC_END();
6709    }
6710    return VINF_SUCCESS;
6711}
6712
6713
6714/** Opcode 0x0f 0x8f. */
/**
 * 'jnle/jg Jv' - near conditional jump taken when ZF is clear and SF == OF
 * (signed greater).  386+ only.  Inverted test: the IEM_MC_IF checks
 * "ZF set or SF != OF" and advances RIP; the jump is emitted in the ELSE
 * arm.
 */
6715FNIEMOP_DEF(iemOp_jnle_Jv)
6716{
6717    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
6718    IEMOP_HLP_MIN_386();
6719    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6720    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6721    {
6722        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6723        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6724
6725        IEM_MC_BEGIN(0, 0);
6726        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6727            IEM_MC_ADVANCE_RIP();
6728        } IEM_MC_ELSE() {
6729            IEM_MC_REL_JMP_S16(i16Imm);
6730        } IEM_MC_ENDIF();
6731        IEM_MC_END();
6732    }
6733    else
6734    {
6735        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6736        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6737
6738        IEM_MC_BEGIN(0, 0);
6739        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6740            IEM_MC_ADVANCE_RIP();
6741        } IEM_MC_ELSE() {
6742            IEM_MC_REL_JMP_S32(i32Imm);
6743        } IEM_MC_ENDIF();
6744        IEM_MC_END();
6745    }
6746    return VINF_SUCCESS;
6747}
6748
6749
6750/** Opcode 0x0f 0x90. */
/**
 * 'seto Eb' - stores 1 to the byte register or memory operand when OF is
 * set, otherwise 0.  386+ only; the LOCK prefix is rejected.  The ModR/M
 * 'reg' field is ignored (see the encoding-test @todo below).
 */
6751FNIEMOP_DEF(iemOp_seto_Eb)
6752{
6753    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
6754    IEMOP_HLP_MIN_386();
6755    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6756
6757    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6758     *        any way. AMD says it's "unused", whatever that means. We're
6759     *        ignoring for now. */
6760    if (IEM_IS_MODRM_REG_MODE(bRm))
6761    {
6762        /* register target */
6763        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6764        IEM_MC_BEGIN(0, 0);
6765        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6766            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6767        } IEM_MC_ELSE() {
6768            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6769        } IEM_MC_ENDIF();
6770        IEM_MC_ADVANCE_RIP();
6771        IEM_MC_END();
6772    }
6773    else
6774    {
6775        /* memory target */
6776        IEM_MC_BEGIN(0, 1);
6777        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6778        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6779        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6780        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6781            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6782        } IEM_MC_ELSE() {
6783            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6784        } IEM_MC_ENDIF();
6785        IEM_MC_ADVANCE_RIP();
6786        IEM_MC_END();
6787    }
6788    return VINF_SUCCESS;
6789}
6790
6791
6792/** Opcode 0x0f 0x91. */
/**
 * 'setno Eb' - stores 1 to the byte register or memory operand when OF is
 * clear, otherwise 0.  386+ only; LOCK prefix rejected.  Inverted test:
 * the IEM_MC_IF checks OF *set* and stores 0 in that arm.
 */
6793FNIEMOP_DEF(iemOp_setno_Eb)
6794{
6795    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
6796    IEMOP_HLP_MIN_386();
6797    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6798
6799    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6800     *        any way. AMD says it's "unused", whatever that means. We're
6801     *        ignoring for now. */
6802    if (IEM_IS_MODRM_REG_MODE(bRm))
6803    {
6804        /* register target */
6805        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6806        IEM_MC_BEGIN(0, 0);
6807        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6808            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6809        } IEM_MC_ELSE() {
6810            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6811        } IEM_MC_ENDIF();
6812        IEM_MC_ADVANCE_RIP();
6813        IEM_MC_END();
6814    }
6815    else
6816    {
6817        /* memory target */
6818        IEM_MC_BEGIN(0, 1);
6819        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6820        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6821        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6822        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6823            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6824        } IEM_MC_ELSE() {
6825            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6826        } IEM_MC_ENDIF();
6827        IEM_MC_ADVANCE_RIP();
6828        IEM_MC_END();
6829    }
6830    return VINF_SUCCESS;
6831}
6832
6833
6834/** Opcode 0x0f 0x92. */
/**
 * 'setc Eb' - stores 1 to the byte register or memory operand when CF is
 * set, otherwise 0.  386+ only; LOCK prefix rejected.
 */
6835FNIEMOP_DEF(iemOp_setc_Eb)
6836{
6837    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
6838    IEMOP_HLP_MIN_386();
6839    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6840
6841    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6842     *        any way. AMD says it's "unused", whatever that means. We're
6843     *        ignoring for now. */
6844    if (IEM_IS_MODRM_REG_MODE(bRm))
6845    {
6846        /* register target */
6847        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6848        IEM_MC_BEGIN(0, 0);
6849        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6850            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6851        } IEM_MC_ELSE() {
6852            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6853        } IEM_MC_ENDIF();
6854        IEM_MC_ADVANCE_RIP();
6855        IEM_MC_END();
6856    }
6857    else
6858    {
6859        /* memory target */
6860        IEM_MC_BEGIN(0, 1);
6861        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6862        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6863        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6864        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6865            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6866        } IEM_MC_ELSE() {
6867            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6868        } IEM_MC_ENDIF();
6869        IEM_MC_ADVANCE_RIP();
6870        IEM_MC_END();
6871    }
6872    return VINF_SUCCESS;
6873}
6874
6875
6876/** Opcode 0x0f 0x93. */
/**
 * 'setnc Eb' - stores 1 to the byte register or memory operand when CF is
 * clear, otherwise 0.  386+ only; LOCK prefix rejected.  Inverted test:
 * the IEM_MC_IF checks CF *set* and stores 0 in that arm.
 */
6877FNIEMOP_DEF(iemOp_setnc_Eb)
6878{
6879    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
6880    IEMOP_HLP_MIN_386();
6881    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6882
6883    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6884     *        any way. AMD says it's "unused", whatever that means. We're
6885     *        ignoring for now. */
6886    if (IEM_IS_MODRM_REG_MODE(bRm))
6887    {
6888        /* register target */
6889        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6890        IEM_MC_BEGIN(0, 0);
6891        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6892            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6893        } IEM_MC_ELSE() {
6894            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6895        } IEM_MC_ENDIF();
6896        IEM_MC_ADVANCE_RIP();
6897        IEM_MC_END();
6898    }
6899    else
6900    {
6901        /* memory target */
6902        IEM_MC_BEGIN(0, 1);
6903        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6904        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6905        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6906        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6907            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6908        } IEM_MC_ELSE() {
6909            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6910        } IEM_MC_ENDIF();
6911        IEM_MC_ADVANCE_RIP();
6912        IEM_MC_END();
6913    }
6914    return VINF_SUCCESS;
6915}
6916
6917
6918/** Opcode 0x0f 0x94. */
/**
 * 'sete Eb' - stores 1 to the byte register or memory operand when ZF is
 * set, otherwise 0.  386+ only; LOCK prefix rejected.
 */
6919FNIEMOP_DEF(iemOp_sete_Eb)
6920{
6921    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
6922    IEMOP_HLP_MIN_386();
6923    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6924
6925    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6926     *        any way. AMD says it's "unused", whatever that means. We're
6927     *        ignoring for now. */
6928    if (IEM_IS_MODRM_REG_MODE(bRm))
6929    {
6930        /* register target */
6931        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6932        IEM_MC_BEGIN(0, 0);
6933        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6934            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6935        } IEM_MC_ELSE() {
6936            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6937        } IEM_MC_ENDIF();
6938        IEM_MC_ADVANCE_RIP();
6939        IEM_MC_END();
6940    }
6941    else
6942    {
6943        /* memory target */
6944        IEM_MC_BEGIN(0, 1);
6945        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6946        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6947        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6948        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6949            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6950        } IEM_MC_ELSE() {
6951            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6952        } IEM_MC_ENDIF();
6953        IEM_MC_ADVANCE_RIP();
6954        IEM_MC_END();
6955    }
6956    return VINF_SUCCESS;
6957}
6958
6959
6960/** Opcode 0x0f 0x95. */
/**
 * 'setne Eb' - stores 1 to the byte register or memory operand when ZF is
 * clear, otherwise 0.  386+ only; LOCK prefix rejected.  Inverted test:
 * the IEM_MC_IF checks ZF *set* and stores 0 in that arm.
 */
6961FNIEMOP_DEF(iemOp_setne_Eb)
6962{
6963    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
6964    IEMOP_HLP_MIN_386();
6965    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6966
6967    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6968     *        any way. AMD says it's "unused", whatever that means. We're
6969     *        ignoring for now. */
6970    if (IEM_IS_MODRM_REG_MODE(bRm))
6971    {
6972        /* register target */
6973        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6974        IEM_MC_BEGIN(0, 0);
6975        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6976            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6977        } IEM_MC_ELSE() {
6978            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6979        } IEM_MC_ENDIF();
6980        IEM_MC_ADVANCE_RIP();
6981        IEM_MC_END();
6982    }
6983    else
6984    {
6985        /* memory target */
6986        IEM_MC_BEGIN(0, 1);
6987        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6988        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6989        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6990        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6991            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6992        } IEM_MC_ELSE() {
6993            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6994        } IEM_MC_ENDIF();
6995        IEM_MC_ADVANCE_RIP();
6996        IEM_MC_END();
6997    }
6998    return VINF_SUCCESS;
6999}
7000
7001
7002/** Opcode 0x0f 0x96. */
/**
 * 'setbe Eb' - stores 1 to the byte register or memory operand when CF or
 * ZF (or both) is set (unsigned below-or-equal), otherwise 0.  386+ only;
 * LOCK prefix rejected.
 */
7003FNIEMOP_DEF(iemOp_setbe_Eb)
7004{
7005    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
7006    IEMOP_HLP_MIN_386();
7007    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7008
7009    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7010     *        any way. AMD says it's "unused", whatever that means. We're
7011     *        ignoring for now. */
7012    if (IEM_IS_MODRM_REG_MODE(bRm))
7013    {
7014        /* register target */
7015        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7016        IEM_MC_BEGIN(0, 0);
7017        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7018            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7019        } IEM_MC_ELSE() {
7020            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7021        } IEM_MC_ENDIF();
7022        IEM_MC_ADVANCE_RIP();
7023        IEM_MC_END();
7024    }
7025    else
7026    {
7027        /* memory target */
7028        IEM_MC_BEGIN(0, 1);
7029        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7030        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7031        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7032        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7033            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7034        } IEM_MC_ELSE() {
7035            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7036        } IEM_MC_ENDIF();
7037        IEM_MC_ADVANCE_RIP();
7038        IEM_MC_END();
7039    }
7040    return VINF_SUCCESS;
7041}
7042
7043
7044/** Opcode 0x0f 0x97. */
/**
 * 'setnbe Eb' - stores 1 to the byte register or memory operand when both
 * CF and ZF are clear (unsigned above), otherwise 0.  386+ only; LOCK
 * prefix rejected.  Inverted test: the IEM_MC_IF checks CF|ZF *set* and
 * stores 0 in that arm.
 */
7045FNIEMOP_DEF(iemOp_setnbe_Eb)
7046{
7047    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
7048    IEMOP_HLP_MIN_386();
7049    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7050
7051    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7052     *        any way. AMD says it's "unused", whatever that means. We're
7053     *        ignoring for now. */
7054    if (IEM_IS_MODRM_REG_MODE(bRm))
7055    {
7056        /* register target */
7057        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7058        IEM_MC_BEGIN(0, 0);
7059        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7060            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7061        } IEM_MC_ELSE() {
7062            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7063        } IEM_MC_ENDIF();
7064        IEM_MC_ADVANCE_RIP();
7065        IEM_MC_END();
7066    }
7067    else
7068    {
7069        /* memory target */
7070        IEM_MC_BEGIN(0, 1);
7071        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7072        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7073        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7074        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7075            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7076        } IEM_MC_ELSE() {
7077            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7078        } IEM_MC_ENDIF();
7079        IEM_MC_ADVANCE_RIP();
7080        IEM_MC_END();
7081    }
7082    return VINF_SUCCESS;
7083}
7084
7085
7086/** Opcode 0x0f 0x98. */
/**
 * 'sets Eb' - stores 1 to the byte register or memory operand when SF is
 * set, otherwise 0.  386+ only; LOCK prefix rejected.
 */
7087FNIEMOP_DEF(iemOp_sets_Eb)
7088{
7089    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
7090    IEMOP_HLP_MIN_386();
7091    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7092
7093    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7094     *        any way. AMD says it's "unused", whatever that means. We're
7095     *        ignoring for now. */
7096    if (IEM_IS_MODRM_REG_MODE(bRm))
7097    {
7098        /* register target */
7099        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7100        IEM_MC_BEGIN(0, 0);
7101        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7102            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7103        } IEM_MC_ELSE() {
7104            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7105        } IEM_MC_ENDIF();
7106        IEM_MC_ADVANCE_RIP();
7107        IEM_MC_END();
7108    }
7109    else
7110    {
7111        /* memory target */
7112        IEM_MC_BEGIN(0, 1);
7113        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7114        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7115        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7116        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7117            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7118        } IEM_MC_ELSE() {
7119            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7120        } IEM_MC_ENDIF();
7121        IEM_MC_ADVANCE_RIP();
7122        IEM_MC_END();
7123    }
7124    return VINF_SUCCESS;
7125}
7126
7127
7128/** Opcode 0x0f 0x99. */
/**
 * 'setns Eb' - stores 1 to the byte register or memory operand when SF is
 * clear, otherwise 0.  386+ only; LOCK prefix rejected.  Inverted test:
 * the IEM_MC_IF checks SF *set* and stores 0 in that arm.
 */
7129FNIEMOP_DEF(iemOp_setns_Eb)
7130{
7131    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
7132    IEMOP_HLP_MIN_386();
7133    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7134
7135    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7136     *        any way. AMD says it's "unused", whatever that means. We're
7137     *        ignoring for now. */
7138    if (IEM_IS_MODRM_REG_MODE(bRm))
7139    {
7140        /* register target */
7141        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7142        IEM_MC_BEGIN(0, 0);
7143        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7144            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7145        } IEM_MC_ELSE() {
7146            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7147        } IEM_MC_ENDIF();
7148        IEM_MC_ADVANCE_RIP();
7149        IEM_MC_END();
7150    }
7151    else
7152    {
7153        /* memory target */
7154        IEM_MC_BEGIN(0, 1);
7155        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7156        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7157        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7158        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7159            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7160        } IEM_MC_ELSE() {
7161            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7162        } IEM_MC_ENDIF();
7163        IEM_MC_ADVANCE_RIP();
7164        IEM_MC_END();
7165    }
7166    return VINF_SUCCESS;
7167}
7168
7169
7170/** Opcode 0x0f 0x9a. */
/**
 * 'setp Eb' - stores 1 to the byte register or memory operand when PF is
 * set, otherwise 0.  386+ only; LOCK prefix rejected.
 */
7171FNIEMOP_DEF(iemOp_setp_Eb)
7172{
7173    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
7174    IEMOP_HLP_MIN_386();
7175    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7176
7177    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7178     *        any way. AMD says it's "unused", whatever that means. We're
7179     *        ignoring for now. */
7180    if (IEM_IS_MODRM_REG_MODE(bRm))
7181    {
7182        /* register target */
7183        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7184        IEM_MC_BEGIN(0, 0);
7185        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7186            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7187        } IEM_MC_ELSE() {
7188            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7189        } IEM_MC_ENDIF();
7190        IEM_MC_ADVANCE_RIP();
7191        IEM_MC_END();
7192    }
7193    else
7194    {
7195        /* memory target */
7196        IEM_MC_BEGIN(0, 1);
7197        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7198        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7199        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7200        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7201            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7202        } IEM_MC_ELSE() {
7203            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7204        } IEM_MC_ENDIF();
7205        IEM_MC_ADVANCE_RIP();
7206        IEM_MC_END();
7207    }
7208    return VINF_SUCCESS;
7209}
7210
7211
7212/** Opcode 0x0f 0x9b. */
/**
 * 'setnp Eb' - stores 1 to the byte register or memory operand when PF is
 * clear, otherwise 0.  386+ only; LOCK prefix rejected.  Inverted test:
 * the IEM_MC_IF checks PF *set* and stores 0 in that arm.
 */
7213FNIEMOP_DEF(iemOp_setnp_Eb)
7214{
7215    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
7216    IEMOP_HLP_MIN_386();
7217    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7218
7219    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7220     *        any way. AMD says it's "unused", whatever that means. We're
7221     *        ignoring for now. */
7222    if (IEM_IS_MODRM_REG_MODE(bRm))
7223    {
7224        /* register target */
7225        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7226        IEM_MC_BEGIN(0, 0);
7227        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7228            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7229        } IEM_MC_ELSE() {
7230            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7231        } IEM_MC_ENDIF();
7232        IEM_MC_ADVANCE_RIP();
7233        IEM_MC_END();
7234    }
7235    else
7236    {
7237        /* memory target */
7238        IEM_MC_BEGIN(0, 1);
7239        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7240        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7241        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7242        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7243            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7244        } IEM_MC_ELSE() {
7245            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7246        } IEM_MC_ENDIF();
7247        IEM_MC_ADVANCE_RIP();
7248        IEM_MC_END();
7249    }
7250    return VINF_SUCCESS;
7251}
7252
7253
7254/** Opcode 0x0f 0x9c. */
/**
 * 'setl Eb' - stores 1 to the byte register or memory operand when
 * SF != OF (signed less), otherwise 0.  386+ only; LOCK prefix rejected.
 */
7255FNIEMOP_DEF(iemOp_setl_Eb)
7256{
7257    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
7258    IEMOP_HLP_MIN_386();
7259    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7260
7261    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7262     *        any way. AMD says it's "unused", whatever that means. We're
7263     *        ignoring for now. */
7264    if (IEM_IS_MODRM_REG_MODE(bRm))
7265    {
7266        /* register target */
7267        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7268        IEM_MC_BEGIN(0, 0);
7269        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7270            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7271        } IEM_MC_ELSE() {
7272            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7273        } IEM_MC_ENDIF();
7274        IEM_MC_ADVANCE_RIP();
7275        IEM_MC_END();
7276    }
7277    else
7278    {
7279        /* memory target */
7280        IEM_MC_BEGIN(0, 1);
7281        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7282        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7283        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7284        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7285            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7286        } IEM_MC_ELSE() {
7287            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7288        } IEM_MC_ENDIF();
7289        IEM_MC_ADVANCE_RIP();
7290        IEM_MC_END();
7291    }
7292    return VINF_SUCCESS;
7293}
7294
7295
7296/** Opcode 0x0f 0x9d. */
/**
 * 'setnl Eb' - stores 1 to the byte register or memory operand when
 * SF == OF (signed greater-or-equal), otherwise 0.  386+ only; LOCK prefix
 * rejected.  Inverted test: the IEM_MC_IF checks SF != OF and stores 0 in
 * that arm.
 */
7297FNIEMOP_DEF(iemOp_setnl_Eb)
7298{
7299    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
7300    IEMOP_HLP_MIN_386();
7301    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7302
7303    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7304     *        any way. AMD says it's "unused", whatever that means. We're
7305     *        ignoring for now. */
7306    if (IEM_IS_MODRM_REG_MODE(bRm))
7307    {
7308        /* register target */
7309        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7310        IEM_MC_BEGIN(0, 0);
7311        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7312            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7313        } IEM_MC_ELSE() {
7314            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7315        } IEM_MC_ENDIF();
7316        IEM_MC_ADVANCE_RIP();
7317        IEM_MC_END();
7318    }
7319    else
7320    {
7321        /* memory target */
7322        IEM_MC_BEGIN(0, 1);
7323        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7324        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7325        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7326        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7327            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7328        } IEM_MC_ELSE() {
7329            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7330        } IEM_MC_ENDIF();
7331        IEM_MC_ADVANCE_RIP();
7332        IEM_MC_END();
7333    }
7334    return VINF_SUCCESS;
7335}
7336
7337
7338/** Opcode 0x0f 0x9e. */
/**
 * 'setle Eb' - stores 1 to the byte register or memory operand when ZF is
 * set or SF != OF (signed less-or-equal), otherwise 0.  386+ only; LOCK
 * prefix rejected.
 */
7339FNIEMOP_DEF(iemOp_setle_Eb)
7340{
7341    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
7342    IEMOP_HLP_MIN_386();
7343    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7344
7345    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7346     *        any way. AMD says it's "unused", whatever that means. We're
7347     *        ignoring for now. */
7348    if (IEM_IS_MODRM_REG_MODE(bRm))
7349    {
7350        /* register target */
7351        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7352        IEM_MC_BEGIN(0, 0);
7353        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7354            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7355        } IEM_MC_ELSE() {
7356            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7357        } IEM_MC_ENDIF();
7358        IEM_MC_ADVANCE_RIP();
7359        IEM_MC_END();
7360    }
7361    else
7362    {
7363        /* memory target */
7364        IEM_MC_BEGIN(0, 1);
7365        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7366        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7367        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7368        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7369            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7370        } IEM_MC_ELSE() {
7371            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7372        } IEM_MC_ENDIF();
7373        IEM_MC_ADVANCE_RIP();
7374        IEM_MC_END();
7375    }
7376    return VINF_SUCCESS;
7377}
7378
7379
7380/** Opcode 0x0f 0x9f. */
7381FNIEMOP_DEF(iemOp_setnle_Eb)
7382{
7383    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
7384    IEMOP_HLP_MIN_386();
7385    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7386
7387    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7388     *        any way. AMD says it's "unused", whatever that means. We're
7389     *        ignoring for now. */
    /* SETNLE/SETG condition: ZF=0 and SF=OF (signed 'greater').  The MC test
       below checks the *inverse* (ZF=1 or SF<>OF), so the 0/1 stores are
       swapped relative to iemOp_setle_Eb. */
7390    if (IEM_IS_MODRM_REG_MODE(bRm))
7391    {
7392        /* register target */
7393        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7394        IEM_MC_BEGIN(0, 0);
7395        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7396            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7397        } IEM_MC_ELSE() {
7398            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7399        } IEM_MC_ENDIF();
7400        IEM_MC_ADVANCE_RIP();
7401        IEM_MC_END();
7402    }
7403    else
7404    {
7405        /* memory target */
7406        IEM_MC_BEGIN(0, 1);
7407        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7408        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7409        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7410        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7411            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7412        } IEM_MC_ELSE() {
7413            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7414        } IEM_MC_ENDIF();
7415        IEM_MC_ADVANCE_RIP();
7416        IEM_MC_END();
7417    }
7418    return VINF_SUCCESS;
7419}
7420
7421
7422/**
7423 * Common 'push segment-register' helper.
7424 */
7425FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
7426{
7427    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* In 64-bit mode only FS and GS pushes reach this helper (0x0f 0xa0/0xa8);
       the one-byte CS/SS/DS/ES pushes are invalid opcodes there. */
7428    Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
    /* Stack ops default to 64-bit operand size in long mode. */
7429    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7430
7431    switch (pVCpu->iem.s.enmEffOpSize)
7432    {
7433        case IEMMODE_16BIT:
7434            IEM_MC_BEGIN(0, 1);
7435            IEM_MC_LOCAL(uint16_t, u16Value);
7436            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
7437            IEM_MC_PUSH_U16(u16Value);
7438            IEM_MC_ADVANCE_RIP();
7439            IEM_MC_END();
7440            break;
7441
7442        case IEMMODE_32BIT:
7443            IEM_MC_BEGIN(0, 1);
7444            IEM_MC_LOCAL(uint32_t, u32Value);
7445            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* NOTE(review): the dedicated _SREG push presumably models CPUs
               that only write the low word for 32-bit sreg pushes — confirm
               against the IEM_MC_PUSH_U32_SREG implementation. */
7446            IEM_MC_PUSH_U32_SREG(u32Value);
7447            IEM_MC_ADVANCE_RIP();
7448            IEM_MC_END();
7449            break;
7450
7451        case IEMMODE_64BIT:
7452            IEM_MC_BEGIN(0, 1);
7453            IEM_MC_LOCAL(uint64_t, u64Value);
7454            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
7455            IEM_MC_PUSH_U64(u64Value);
7456            IEM_MC_ADVANCE_RIP();
7457            IEM_MC_END();
7458            break;
7459    }
7460
7461    return VINF_SUCCESS;
7462}
7463
7464
7465/** Opcode 0x0f 0xa0. */
7466FNIEMOP_DEF(iemOp_push_fs)
7467{
7468    IEMOP_MNEMONIC(push_fs, "push fs");
7469    IEMOP_HLP_MIN_386();
    /* The common worker repeats this check; redundant but harmless. */
7470    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7471    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
7472}
7473
7474
7475/** Opcode 0x0f 0xa1. */
7476FNIEMOP_DEF(iemOp_pop_fs)
7477{
7478    IEMOP_MNEMONIC(pop_fs, "pop fs");
7479    IEMOP_HLP_MIN_386();
7480    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Segment loads have descriptor/privilege side effects, so defer to the
       C implementation rather than inlining MC blocks. */
7481    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
7482}
7483
7484
7485/** Opcode 0x0f 0xa2. */
7486FNIEMOP_DEF(iemOp_cpuid)
7487{
7488    IEMOP_MNEMONIC(cpuid, "cpuid");
7489    IEMOP_HLP_MIN_486(); /* not all 486es. */
7490    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Full leaf lookup lives in the C implementation. */
7491    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
7492}
7493
7494
7495/**
7496 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
7497 * iemOp_bts_Ev_Gv.
7498 */
7499FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPSHIFTDBLSIZES, pImpl)
7500{
7501    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* BT/BTS/BTR/BTC leave OF/SF/ZF/AF/PF undefined; only CF is defined. */
7502    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7503
7504    if (IEM_IS_MODRM_REG_MODE(bRm))
7505    {
7506        /* register destination. */
        /* For a register operand the bit offset wraps modulo the operand
           width, so mask it here (0xf/0x1f/0x3f) before calling the impl. */
7507        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7508        switch (pVCpu->iem.s.enmEffOpSize)
7509        {
7510            case IEMMODE_16BIT:
7511                IEM_MC_BEGIN(3, 0);
7512                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7513                IEM_MC_ARG(uint16_t, u16Src, 1);
7514                IEM_MC_ARG(uint32_t *, pEFlags, 2);
7515
7516                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7517                IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
7518                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7519                IEM_MC_REF_EFLAGS(pEFlags);
7520                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7521
7522                IEM_MC_ADVANCE_RIP();
7523                IEM_MC_END();
7524                return VINF_SUCCESS;
7525
7526            case IEMMODE_32BIT:
7527                IEM_MC_BEGIN(3, 0);
7528                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7529                IEM_MC_ARG(uint32_t, u32Src, 1);
7530                IEM_MC_ARG(uint32_t *, pEFlags, 2);
7531
7532                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7533                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
7534                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7535                IEM_MC_REF_EFLAGS(pEFlags);
7536                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7537
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
7538                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7539                IEM_MC_ADVANCE_RIP();
7540                IEM_MC_END();
7541                return VINF_SUCCESS;
7542
7543            case IEMMODE_64BIT:
7544                IEM_MC_BEGIN(3, 0);
7545                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7546                IEM_MC_ARG(uint64_t, u64Src, 1);
7547                IEM_MC_ARG(uint32_t *, pEFlags, 2);
7548
7549                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7550                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
7551                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7552                IEM_MC_REF_EFLAGS(pEFlags);
7553                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7554
7555                IEM_MC_ADVANCE_RIP();
7556                IEM_MC_END();
7557                return VINF_SUCCESS;
7558
7559            IEM_NOT_REACHED_DEFAULT_CASE_RET();
7560        }
7561    }
7562    else
7563    {
7564        /* memory destination. */
7565
        /* BT never writes, so it has no locked variant and only needs read
           access; BTS/BTR/BTC read-modify-write the destination. */
7566        uint32_t fAccess;
7567        if (pImpl->pfnLockedU16)
7568            fAccess = IEM_ACCESS_DATA_RW;
7569        else /* BT */
7570            fAccess = IEM_ACCESS_DATA_R;
7571
7572        /** @todo test negative bit offsets! */
        /* Memory operands use bit-string addressing: the (signed) bit offset
           in the source register is split into an operand-sized address
           adjustment (SAR by log2(width), SHL by log2(bytes)) applied to the
           effective address, plus a residual in-word bit index (AND mask). */
7573        switch (pVCpu->iem.s.enmEffOpSize)
7574        {
7575            case IEMMODE_16BIT:
7576                IEM_MC_BEGIN(3, 2);
7577                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7578                IEM_MC_ARG(uint16_t, u16Src, 1);
7579                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7580                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7581                IEM_MC_LOCAL(int16_t, i16AddrAdj);
7582
7583                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7584                if (pImpl->pfnLockedU16)
7585                    IEMOP_HLP_DONE_DECODING();
7586                else
7587                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7588                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7589                IEM_MC_ASSIGN(i16AddrAdj, u16Src);
7590                IEM_MC_AND_ARG_U16(u16Src, 0x0f);
7591                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
7592                IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
7593                IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
7594                IEM_MC_FETCH_EFLAGS(EFlags);
7595
7596                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7597                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7598                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7599                else
7600                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7601                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7602
7603                IEM_MC_COMMIT_EFLAGS(EFlags);
7604                IEM_MC_ADVANCE_RIP();
7605                IEM_MC_END();
7606                return VINF_SUCCESS;
7607
7608            case IEMMODE_32BIT:
7609                IEM_MC_BEGIN(3, 2);
7610                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7611                IEM_MC_ARG(uint32_t, u32Src, 1);
7612                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7613                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7614                IEM_MC_LOCAL(int32_t, i32AddrAdj);
7615
7616                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7617                if (pImpl->pfnLockedU16)
7618                    IEMOP_HLP_DONE_DECODING();
7619                else
7620                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7621                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7622                IEM_MC_ASSIGN(i32AddrAdj, u32Src);
7623                IEM_MC_AND_ARG_U32(u32Src, 0x1f);
7624                IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
7625                IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
7626                IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
7627                IEM_MC_FETCH_EFLAGS(EFlags);
7628
7629                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7630                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7631                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7632                else
7633                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7634                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7635
7636                IEM_MC_COMMIT_EFLAGS(EFlags);
7637                IEM_MC_ADVANCE_RIP();
7638                IEM_MC_END();
7639                return VINF_SUCCESS;
7640
7641            case IEMMODE_64BIT:
7642                IEM_MC_BEGIN(3, 2);
7643                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7644                IEM_MC_ARG(uint64_t, u64Src, 1);
7645                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7646                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7647                IEM_MC_LOCAL(int64_t, i64AddrAdj);
7648
7649                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7650                if (pImpl->pfnLockedU16)
7651                    IEMOP_HLP_DONE_DECODING();
7652                else
7653                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7654                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7655                IEM_MC_ASSIGN(i64AddrAdj, u64Src);
7656                IEM_MC_AND_ARG_U64(u64Src, 0x3f);
7657                IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
7658                IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
7659                IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
7660                IEM_MC_FETCH_EFLAGS(EFlags);
7661
7662                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7663                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7664                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7665                else
7666                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7667                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7668
7669                IEM_MC_COMMIT_EFLAGS(EFlags);
7670                IEM_MC_ADVANCE_RIP();
7671                IEM_MC_END();
7672                return VINF_SUCCESS;
7673
7674            IEM_NOT_REACHED_DEFAULT_CASE_RET();
7675        }
7676    }
7677}
7678
7679
7680/** Opcode 0x0f 0xa3. */
7681FNIEMOP_DEF(iemOp_bt_Ev_Gv)
7682{
7683    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
7684    IEMOP_HLP_MIN_386();
    /* g_iemAImpl_bt has no locked variants, which the common worker uses to
       treat the memory operand as read-only and to reject LOCK. */
7685    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
7686}
7687
7688
7689/**
7690 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
7691 */
7692FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
7693{
7694    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SHLD/SHRD leave AF and OF undefined (OF only for counts > 1). */
7695    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7696
7697    if (IEM_IS_MODRM_REG_MODE(bRm))
7698    {
        /* Register destination: the imm8 shift count follows ModR/M directly. */
7699        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7700        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7701
7702        switch (pVCpu->iem.s.enmEffOpSize)
7703        {
7704            case IEMMODE_16BIT:
7705                IEM_MC_BEGIN(4, 0);
7706                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7707                IEM_MC_ARG(uint16_t, u16Src, 1);
7708                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7709                IEM_MC_ARG(uint32_t *, pEFlags, 3);
7710
7711                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7712                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7713                IEM_MC_REF_EFLAGS(pEFlags);
                /* Count masking/clamping is the assembly worker's job. */
7714                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7715
7716                IEM_MC_ADVANCE_RIP();
7717                IEM_MC_END();
7718                return VINF_SUCCESS;
7719
7720            case IEMMODE_32BIT:
7721                IEM_MC_BEGIN(4, 0);
7722                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7723                IEM_MC_ARG(uint32_t, u32Src, 1);
7724                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7725                IEM_MC_ARG(uint32_t *, pEFlags, 3);
7726
7727                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7728                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7729                IEM_MC_REF_EFLAGS(pEFlags);
7730                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7731
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
7732                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7733                IEM_MC_ADVANCE_RIP();
7734                IEM_MC_END();
7735                return VINF_SUCCESS;
7736
7737            case IEMMODE_64BIT:
7738                IEM_MC_BEGIN(4, 0);
7739                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7740                IEM_MC_ARG(uint64_t, u64Src, 1);
7741                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7742                IEM_MC_ARG(uint32_t *, pEFlags, 3);
7743
7744                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7745                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7746                IEM_MC_REF_EFLAGS(pEFlags);
7747                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7748
7749                IEM_MC_ADVANCE_RIP();
7750                IEM_MC_END();
7751                return VINF_SUCCESS;
7752
7753            IEM_NOT_REACHED_DEFAULT_CASE_RET();
7754        }
7755    }
7756    else
7757    {
        /* Memory destination: note the '1' passed to CALC_RM_EFF_ADDR tells
           it one more opcode byte (the imm8) is still outstanding, which
           matters for RIP-relative addressing; the imm8 is fetched after. */
7758        switch (pVCpu->iem.s.enmEffOpSize)
7759        {
7760            case IEMMODE_16BIT:
7761                IEM_MC_BEGIN(4, 2);
7762                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7763                IEM_MC_ARG(uint16_t, u16Src, 1);
7764                IEM_MC_ARG(uint8_t, cShiftArg, 2);
7765                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7766                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7767
7768                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7769                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7770                IEM_MC_ASSIGN(cShiftArg, cShift);
7771                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7772                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7773                IEM_MC_FETCH_EFLAGS(EFlags);
7774                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7775                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7776
7777                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7778                IEM_MC_COMMIT_EFLAGS(EFlags);
7779                IEM_MC_ADVANCE_RIP();
7780                IEM_MC_END();
7781                return VINF_SUCCESS;
7782
7783            case IEMMODE_32BIT:
7784                IEM_MC_BEGIN(4, 2);
7785                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7786                IEM_MC_ARG(uint32_t, u32Src, 1);
7787                IEM_MC_ARG(uint8_t, cShiftArg, 2);
7788                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7789                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7790
7791                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7792                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7793                IEM_MC_ASSIGN(cShiftArg, cShift);
7794                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7795                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7796                IEM_MC_FETCH_EFLAGS(EFlags);
7797                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7798                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7799
7800                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7801                IEM_MC_COMMIT_EFLAGS(EFlags);
7802                IEM_MC_ADVANCE_RIP();
7803                IEM_MC_END();
7804                return VINF_SUCCESS;
7805
7806            case IEMMODE_64BIT:
7807                IEM_MC_BEGIN(4, 2);
7808                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7809                IEM_MC_ARG(uint64_t, u64Src, 1);
7810                IEM_MC_ARG(uint8_t, cShiftArg, 2);
7811                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7812                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7813
7814                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7815                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7816                IEM_MC_ASSIGN(cShiftArg, cShift);
7817                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7818                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7819                IEM_MC_FETCH_EFLAGS(EFlags);
7820                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7821                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7822
7823                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7824                IEM_MC_COMMIT_EFLAGS(EFlags);
7825                IEM_MC_ADVANCE_RIP();
7826                IEM_MC_END();
7827                return VINF_SUCCESS;
7828
7829            IEM_NOT_REACHED_DEFAULT_CASE_RET();
7830        }
7831    }
7832}
7833
7834
7835/**
7836 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
7837 */
7838FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
7839{
7840    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SHLD/SHRD leave AF and OF undefined (OF only for counts > 1). */
7841    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7842
7843    if (IEM_IS_MODRM_REG_MODE(bRm))
7844    {
        /* Register destination; shift count comes from CL at runtime. */
7845        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7846
7847        switch (pVCpu->iem.s.enmEffOpSize)
7848        {
7849            case IEMMODE_16BIT:
7850                IEM_MC_BEGIN(4, 0);
7851                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7852                IEM_MC_ARG(uint16_t, u16Src, 1);
7853                IEM_MC_ARG(uint8_t, cShiftArg, 2);
7854                IEM_MC_ARG(uint32_t *, pEFlags, 3);
7855
7856                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7857                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7858                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7859                IEM_MC_REF_EFLAGS(pEFlags);
                /* Count masking/clamping is the assembly worker's job. */
7860                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7861
7862                IEM_MC_ADVANCE_RIP();
7863                IEM_MC_END();
7864                return VINF_SUCCESS;
7865
7866            case IEMMODE_32BIT:
7867                IEM_MC_BEGIN(4, 0);
7868                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7869                IEM_MC_ARG(uint32_t, u32Src, 1);
7870                IEM_MC_ARG(uint8_t, cShiftArg, 2);
7871                IEM_MC_ARG(uint32_t *, pEFlags, 3);
7872
7873                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7874                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7875                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7876                IEM_MC_REF_EFLAGS(pEFlags);
7877                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7878
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
7879                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7880                IEM_MC_ADVANCE_RIP();
7881                IEM_MC_END();
7882                return VINF_SUCCESS;
7883
7884            case IEMMODE_64BIT:
7885                IEM_MC_BEGIN(4, 0);
7886                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7887                IEM_MC_ARG(uint64_t, u64Src, 1);
7888                IEM_MC_ARG(uint8_t, cShiftArg, 2);
7889                IEM_MC_ARG(uint32_t *, pEFlags, 3);
7890
7891                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7892                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7893                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7894                IEM_MC_REF_EFLAGS(pEFlags);
7895                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7896
7897                IEM_MC_ADVANCE_RIP();
7898                IEM_MC_END();
7899                return VINF_SUCCESS;
7900
7901            IEM_NOT_REACHED_DEFAULT_CASE_RET();
7902        }
7903    }
7904    else
7905    {
        /* Memory destination: no trailing immediate this time, so '0' extra
           opcode bytes for the effective address calculation. */
7906        switch (pVCpu->iem.s.enmEffOpSize)
7907        {
7908            case IEMMODE_16BIT:
7909                IEM_MC_BEGIN(4, 2);
7910                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7911                IEM_MC_ARG(uint16_t, u16Src, 1);
7912                IEM_MC_ARG(uint8_t, cShiftArg, 2);
7913                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7914                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7915
7916                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7917                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7918                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7919                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7920                IEM_MC_FETCH_EFLAGS(EFlags);
7921                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7922                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7923
7924                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7925                IEM_MC_COMMIT_EFLAGS(EFlags);
7926                IEM_MC_ADVANCE_RIP();
7927                IEM_MC_END();
7928                return VINF_SUCCESS;
7929
7930            case IEMMODE_32BIT:
7931                IEM_MC_BEGIN(4, 2);
7932                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7933                IEM_MC_ARG(uint32_t, u32Src, 1);
7934                IEM_MC_ARG(uint8_t, cShiftArg, 2);
7935                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7936                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7937
7938                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7939                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7940                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7941                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7942                IEM_MC_FETCH_EFLAGS(EFlags);
7943                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7944                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7945
7946                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7947                IEM_MC_COMMIT_EFLAGS(EFlags);
7948                IEM_MC_ADVANCE_RIP();
7949                IEM_MC_END();
7950                return VINF_SUCCESS;
7951
7952            case IEMMODE_64BIT:
7953                IEM_MC_BEGIN(4, 2);
7954                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7955                IEM_MC_ARG(uint64_t, u64Src, 1);
7956                IEM_MC_ARG(uint8_t, cShiftArg, 2);
7957                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7958                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7959
7960                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7961                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7962                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7963                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7964                IEM_MC_FETCH_EFLAGS(EFlags);
7965                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7966                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7967
7968                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7969                IEM_MC_COMMIT_EFLAGS(EFlags);
7970                IEM_MC_ADVANCE_RIP();
7971                IEM_MC_END();
7972                return VINF_SUCCESS;
7973
7974            IEM_NOT_REACHED_DEFAULT_CASE_RET();
7975        }
7976    }
7977}
7978
7979
7980
7981/** Opcode 0x0f 0xa4. */
7982FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
7983{
7984    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
7985    IEMOP_HLP_MIN_386();
    /* Shift-count semantics differ between CPU vendors; pick the right
       eflags-behaviour table for the emulated CPU. */
7986    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
7987}
7988
7989
7990/** Opcode 0x0f 0xa5. */
7991FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
7992{
7993    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
7994    IEMOP_HLP_MIN_386();
    /* Same worker as the Ib form, but the count is read from CL. */
7995    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
7996}
7997
7998
7999/** Opcode 0x0f 0xa8. */
8000FNIEMOP_DEF(iemOp_push_gs)
8001{
8002    IEMOP_MNEMONIC(push_gs, "push gs");
8003    IEMOP_HLP_MIN_386();
    /* The common worker repeats this check; redundant but harmless. */
8004    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8005    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
8006}
8007
8008
8009/** Opcode 0x0f 0xa9. */
8010FNIEMOP_DEF(iemOp_pop_gs)
8011{
8012    IEMOP_MNEMONIC(pop_gs, "pop gs");
8013    IEMOP_HLP_MIN_386();
8014    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Segment loads have descriptor/privilege side effects, so defer to the
       C implementation rather than inlining MC blocks. */
8015    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
8016}
8017
8018
8019/** Opcode 0x0f 0xaa. */
8020FNIEMOP_DEF(iemOp_rsm)
8021{
8022    IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
8023    IEMOP_HLP_MIN_386(); /* 386SL and later. */
8024    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Resume-from-SMM is entirely handled by the C implementation. */
8025    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
8026}
8027
8028
8029
8030/** Opcode 0x0f 0xab. */
8031FNIEMOP_DEF(iemOp_bts_Ev_Gv)
8032{
8033    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
8034    IEMOP_HLP_MIN_386();
    /* g_iemAImpl_bts provides locked variants, so the common worker allows
       LOCK and maps the memory operand read-write. */
8035    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
8036}
8037
8038
8039/** Opcode 0x0f 0xac. */
8040FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
8041{
8042    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
8043    IEMOP_HLP_MIN_386();
    /* Shares the SHLD worker; only the eflags/impl table differs. */
8044    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
8045}
8046
8047
8048/** Opcode 0x0f 0xad. */
8049FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
8050{
8051    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
8052    IEMOP_HLP_MIN_386();
    /* Shares the SHLD worker; only the eflags/impl table differs. */
8053    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
8054}
8055
8056
8057/** Opcode 0x0f 0xae mem/0. */
8058FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
8059{
8060    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    /* #UD unless the guest CPU profile advertises FXSAVE/FXRSTOR. */
8061    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
8062        return IEMOP_RAISE_INVALID_OPCODE();
8063
8064    IEM_MC_BEGIN(3, 1);
8065    IEM_MC_ARG(uint8_t, iEffSeg, 0);
8066    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8067    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
8068    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8069    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* FXSAVE only reads the FPU/SSE state, hence FOR_READ. */
8070    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8071    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8072    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
8073    IEM_MC_END();
8074    return VINF_SUCCESS;
8075}
8076
8077
8078/** Opcode 0x0f 0xae mem/1. */
8079FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
8080{
8081    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    /* #UD unless the guest CPU profile advertises FXSAVE/FXRSTOR. */
8082    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
8083        return IEMOP_RAISE_INVALID_OPCODE();
8084
8085    IEM_MC_BEGIN(3, 1);
8086    IEM_MC_ARG(uint8_t, iEffSeg, 0);
8087    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8088    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
8089    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8090    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* FXRSTOR rewrites the FPU/SSE state, so actualize it for change. */
8091    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8092    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8093    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
8094    IEM_MC_END();
8095    return VINF_SUCCESS;
8096}
8097
8098
8099/**
8100 * @opmaps grp15
8101 * @opcode !11/2
8102 * @oppfx none
8103 * @opcpuid sse
8104 * @opgroup og_sse_mxcsrsm
8105 * @opxcpttype 5
8106 * @optest op1=0 -> mxcsr=0
8107 * @optest op1=0x2083 -> mxcsr=0x2083
8108 * @optest op1=0xfffffffe -> value.xcpt=0xd
8109 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
8110 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
8111 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
8112 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
8113 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
8114 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
8115 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
8116 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
8117 */
8118FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
8119{
8120    IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* #UD unless the guest CPU profile advertises SSE. */
8121    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
8122        return IEMOP_RAISE_INVALID_OPCODE();
8123
8124    IEM_MC_BEGIN(2, 0);
8125    IEM_MC_ARG(uint8_t, iEffSeg, 0);
8126    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8127    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8128    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* LDMXCSR loads a new MXCSR value, i.e. it *modifies* the SSE state, so
       actualize it for change (matching fxrstor) rather than for read; a
       read-only actualization would not mark the updated MXCSR dirty. */
8129    IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
8130    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8131    IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
8132    IEM_MC_END();
8133    return VINF_SUCCESS;
8134}
8135
8136
8137/**
8138 * @opmaps grp15
8139 * @opcode !11/3
8140 * @oppfx none
8141 * @opcpuid sse
8142 * @opgroup og_sse_mxcsrsm
8143 * @opxcpttype 5
8144 * @optest mxcsr=0 -> op1=0
8145 * @optest mxcsr=0x2083 -> op1=0x2083
8146 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
8147 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
8148 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
8149 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
8150 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
8151 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
8152 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
8153 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
8154 */
8155FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
8156{
8157    IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* #UD unless the guest CPU profile advertises SSE. */
8158    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
8159        return IEMOP_RAISE_INVALID_OPCODE();
8160
8161    IEM_MC_BEGIN(2, 0);
8162    IEM_MC_ARG(uint8_t, iEffSeg, 0);
8163    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8164    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8165    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* STMXCSR only reads MXCSR (stores it to memory), so FOR_READ suffices. */
8166    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8167    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8168    IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
8169    IEM_MC_END();
8170    return VINF_SUCCESS;
8171}
8172
8173
8174/**
8175 * @opmaps grp15
8176 * @opcode !11/4
8177 * @oppfx none
8178 * @opcpuid xsave
8179 * @opgroup og_system
8180 * @opxcpttype none
8181 */
8182FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
8183{
8184    IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
    /* #UD unless the guest CPU profile advertises XSAVE/XRSTOR. */
8185    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
8186        return IEMOP_RAISE_INVALID_OPCODE();
8187
8188    IEM_MC_BEGIN(3, 0);
8189    IEM_MC_ARG(uint8_t, iEffSeg, 0);
8190    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8191    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
8192    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8193    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* XSAVE only reads the processor state (writes it to memory). */
8194    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8195    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8196    IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
8197    IEM_MC_END();
8198    return VINF_SUCCESS;
8199}
8200
8201
8202/**
8203 * @opmaps grp15
8204 * @opcode !11/5
8205 * @oppfx none
8206 * @opcpuid xsave
8207 * @opgroup og_system
8208 * @opxcpttype none
8209 */
8210FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
8211{
8212    IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
    /* #UD unless the guest CPU profile advertises XSAVE/XRSTOR. */
8213    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
8214        return IEMOP_RAISE_INVALID_OPCODE();
8215
8216    IEM_MC_BEGIN(3, 0);
8217    IEM_MC_ARG(uint8_t, iEffSeg, 0);
8218    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8219    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
8220    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8221    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* XRSTOR *restores* (overwrites) the FPU/SSE/extended state, so the state
       must be actualized for change — same as fxrstor above; FOR_READ would
       fail to mark the restored state dirty for write-back. */
8222    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8223    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8224    IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
8225    IEM_MC_END();
8226    return VINF_SUCCESS;
8227}
8228
8229/** Opcode 0x0f 0xae mem/6. */
/* XSAVEOPT is not implemented yet; decodes to #UD via the stub macro. */
8230FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
8231
8232/**
8233 * @opmaps grp15
8234 * @opcode !11/7
8235 * @oppfx none
8236 * @opcpuid clfsh
8237 * @opgroup og_cachectl
8238 * @optest op1=1 ->
8239 */
8240FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
8241{
8242    IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Without CLFLUSH support this encoding falls back to the generic
       invalid-with-ModRM handling (still consumes the operand bytes). */
8243    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
8244        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
8245
8246    IEM_MC_BEGIN(2, 0);
8247    IEM_MC_ARG(uint8_t, iEffSeg, 0);
8248    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8249    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8250    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8251    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* Shared C worker handles both CLFLUSH and CLFLUSHOPT. */
8252    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
8253    IEM_MC_END();
8254    return VINF_SUCCESS;
8255}
8256
8257/**
8258 * @opmaps grp15
8259 * @opcode !11/7
8260 * @oppfx 0x66
8261 * @opcpuid clflushopt
8262 * @opgroup og_cachectl
8263 * @optest op1=1 ->
8264 */
8265FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
8266{
8267    IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Without CLFLUSHOPT support this encoding falls back to the generic
       invalid-with-ModRM handling (still consumes the operand bytes). */
8268    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
8269        return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
8270
8271    IEM_MC_BEGIN(2, 0);
8272    IEM_MC_ARG(uint8_t, iEffSeg, 0);
8273    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8274    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8275    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8276    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* Shared C worker handles both CLFLUSH and CLFLUSHOPT. */
8277    IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
8278    IEM_MC_END();
8279    return VINF_SUCCESS;
8280}
8281
8282
8283/** Opcode 0x0f 0xae 11b/5. */
8284FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
8285{
8286    RT_NOREF_PV(bRm);
8287    IEMOP_MNEMONIC(lfence, "lfence");
8288    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* LFENCE requires SSE2 (CPUID.01H:EDX.SSE2). */
8289    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8290        return IEMOP_RAISE_INVALID_OPCODE();
8291
8292    IEM_MC_BEGIN(0, 0);
    /* Emit a real lfence when the host supports it, otherwise a generic
       memory fence; non-ARM64 hosts without SSE2 take the fallback. */
8293#ifndef RT_ARCH_ARM64
8294    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
8295#endif
8296        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
8297#ifndef RT_ARCH_ARM64
8298    else
8299        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
8300#endif
8301    IEM_MC_ADVANCE_RIP();
8302    IEM_MC_END();
8303    return VINF_SUCCESS;
8304}
8305
8306
8307/** Opcode 0x0f 0xae 11b/6. */
8308FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
8309{
8310    RT_NOREF_PV(bRm);
8311    IEMOP_MNEMONIC(mfence, "mfence");
8312    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* MFENCE requires SSE2 (CPUID.01H:EDX.SSE2). */
8313    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8314        return IEMOP_RAISE_INVALID_OPCODE();
8315
8316    IEM_MC_BEGIN(0, 0);
    /* Emit a real mfence when the host supports it, otherwise a generic
       memory fence; non-ARM64 hosts without SSE2 take the fallback. */
8317#ifndef RT_ARCH_ARM64
8318    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
8319#endif
8320        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
8321#ifndef RT_ARCH_ARM64
8322    else
8323        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
8324#endif
8325    IEM_MC_ADVANCE_RIP();
8326    IEM_MC_END();
8327    return VINF_SUCCESS;
8328}
8329
8330
8331/** Opcode 0x0f 0xae 11b/7. */
8332FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
8333{
8334    RT_NOREF_PV(bRm);
8335    IEMOP_MNEMONIC(sfence, "sfence");
8336    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): the SDM gates SFENCE on CPUID SSE (bit 25), not SSE2 —
       this check is stricter than real hardware; confirm intentional. */
8337    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8338        return IEMOP_RAISE_INVALID_OPCODE();
8339
8340    IEM_MC_BEGIN(0, 0);
    /* Emit a real sfence when the host supports it, otherwise a generic
       memory fence; non-ARM64 hosts without SSE2 take the fallback. */
8341#ifndef RT_ARCH_ARM64
8342    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
8343#endif
8344        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
8345#ifndef RT_ARCH_ARM64
8346    else
8347        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
8348#endif
8349    IEM_MC_ADVANCE_RIP();
8350    IEM_MC_END();
8351    return VINF_SUCCESS;
8352}
8353
8354
/**
 * Opcode 0xf3 0x0f 0xae 11b/0 - RDFSBASE Ry.
 *
 * Reads the FS segment base into the modrm r/m general register.  The
 * FSGSBASE feature/CR4 checks are done by IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT.
 * Operand size selects a 64-bit or 32-bit destination; REX.W picks the
 * 64-bit form, otherwise only the low 32 bits of the base are stored.
 */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8382
8383
/**
 * Opcode 0xf3 0x0f 0xae 11b/1 - RDGSBASE Ry.
 *
 * Identical to rdfsbase except it reads the GS segment base.  See
 * iemOp_Grp15_rdfsbase for the operand-size and exception notes.
 */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8411
8412
/**
 * Opcode 0xf3 0x0f 0xae 11b/2 - WRFSBASE Ry.
 *
 * Writes the FS segment base from the modrm r/m general register.  The
 * 64-bit form raises \#GP(0) for non-canonical addresses; the 32-bit form
 * cannot produce one (the value is zero-extended into the 64-bit base by
 * the uint32_t -> uint64_t argument conversion) so no canonical check.
 */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst); /* zero-extends to 64-bit base */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8441
8442
/**
 * Opcode 0xf3 0x0f 0xae 11b/3 - WRGSBASE Ry.
 *
 * Identical to wrfsbase except it writes the GS segment base.  See
 * iemOp_Grp15_wrfsbase for the canonical-address and zero-extension notes.
 */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst); /* zero-extends to 64-bit base */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8471
8472
8473/**
8474 * Group 15 jump table for register variant.
8475 */
8476IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
8477{ /* pfx: none, 066h, 0f3h, 0f2h */
8478 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
8479 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
8480 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
8481 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
8482 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8483 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8484 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8485 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8486};
8487AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
8488
8489
8490/**
8491 * Group 15 jump table for memory variant.
8492 */
8493IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
8494{ /* pfx: none, 066h, 0f3h, 0f2h */
8495 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8496 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8497 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8498 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8499 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8500 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8501 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8502 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8503};
8504AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
8505
8506
/**
 * Opcode 0x0f 0xae - group 15 dispatcher.
 *
 * Fetches the modrm byte and dispatches via the register or memory jump table
 * using (modrm.reg * 4) + the mandatory-prefix index as the table index.
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                             + pVCpu->iem.s.idxPrefix], bRm);
}
8520
8521
/**
 * Opcode 0x0f 0xaf - IMUL Gv,Ev (two-operand form).
 *
 * Delegates to the generic rv,rm binary-operator helper; SF/ZF/AF/PF are
 * declared undefined for the verifier since hardware leaves them unspecified.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
}
8530
8531
/**
 * Opcode 0x0f 0xb0 - CMPXCHG Eb,Gb.
 *
 * Compares AL with Eb; on match stores Gb into Eb, otherwise loads Eb into AL
 * (the comparison/update itself lives in the iemAImpl_cmpxchg_u8* workers).
 * The memory form honours the LOCK prefix by selecting the locked worker and
 * unconditionally writes AL back from the local copy afterwards.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operate directly on the guest register refs. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read/write, work on a local AL copy, then
           commit the mapping, EFLAGS and AL. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* the worker may have updated the local AL copy */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8590
/**
 * Opcode 0x0f 0xb1 - CMPXCHG Ev,Gv.
 *
 * 16/32/64-bit variant of CMPXCHG: compares {AX,EAX,RAX} with Ev, on match
 * stores Gv into Ev, otherwise loads Ev into the accumulator (done by the
 * iemAImpl_cmpxchg_u* workers).  Register and memory forms each switch on the
 * effective operand size; the LOCK prefix selects the locked worker.
 *
 * @note The 64-bit worker takes the source by reference on 32-bit (x86)
 *       hosts and by value elsewhere, hence the RT_ARCH_X86 conditionals.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operate directly on guest register references. */
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit writes zero the high halves of both touched registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: map it read/write, use a local accumulator copy
           and write it back unconditionally after the worker ran. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); /* implicitly clears the high dword */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8784
8785
/**
 * Common worker for LSS/LFS/LGS (far pointer loads).
 *
 * Fetches a far pointer (offset followed by a 16-bit selector) from memory
 * and hands selector + offset to iemCImpl_load_SReg_Greg, which loads the
 * segment register and the general register.  The offset width follows the
 * effective operand size (16/32/64 bits); the selector always sits right
 * after the offset (at displacement 2/4/8 respectively).
 *
 * @param   iSegReg     The segment register to load (X86_SREG_SS/FS/GS).
 * @param   bRm         The modrm byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
    uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8847
8848
/**
 * Opcode 0x0f 0xb2 - LSS Gv,Mp.
 *
 * Far pointer load into SS:Gv; register operands are invalid (\#UD).
 */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE(); /* memory operand required */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}
8859
8860
/**
 * Opcode 0x0f 0xb3 - BTR Ev,Gv (bit test and reset).
 *
 * Delegates to the common bit-operation worker with the btr implementation.
 */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
8868
8869
/**
 * Opcode 0x0f 0xb4 - LFS Gv,Mp.
 *
 * Far pointer load into FS:Gv; register operands are invalid (\#UD).
 */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE(); /* memory operand required */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}
8880
8881
/**
 * Opcode 0x0f 0xb5 - LGS Gv,Mp.
 *
 * Far pointer load into GS:Gv; register operands are invalid (\#UD).
 */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return IEMOP_RAISE_INVALID_OPCODE(); /* memory operand required */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}
8892
8893
/**
 * Opcode 0x0f 0xb6 - MOVZX Gv,Eb.
 *
 * Zero-extends a byte (register or memory) into a 16/32/64-bit general
 * register, selected by the effective operand size.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8987
8988
/**
 * Opcode 0x0f 0xb7 - MOVZX Gv,Ew.
 *
 * Zero-extends a word (register or memory) into a 32- or 64-bit general
 * register.  Only the 64-bit mode distinction matters; the 16-bit operand
 * size is treated like 32-bit (see the todo below).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
9057
9058
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF/Itanium); raises \#UD. */
FNIEMOP_UD_STUB(iemOp_jmpe);
9061
9062
/**
 * Opcode 0xf3 0x0f 0xb8 - POPCNT Gv,Ev.
 *
 * Raises \#UD when the guest CPU profile lacks POPCNT.  Uses the native
 * assembly workers when the host supports POPCNT (x86/amd64 builds only),
 * otherwise the portable C fallbacks, selected via IEM_SELECT_HOST_OR_FALLBACK.
 */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    static const IEMOPBINSIZES s_Native =
    {   NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
# endif
    static const IEMOPBINSIZES s_Fallback =
    {   NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
#endif
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
}
9079
9080
9081/**
9082 * @opcode 0xb9
9083 * @opinvalid intel-modrm
9084 * @optest ->
9085 */
9086FNIEMOP_DEF(iemOp_Grp10)
9087{
9088 /*
9089 * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
9090 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
9091 */
9092 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
9093 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
9094 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
9095}
9096
9097
/**
 * Opcode 0x0f 0xba - group 8: BT/BTS/BTR/BTC Ev,Ib.
 *
 * The modrm.reg field selects the operation (/4../7); /0../3 are invalid but
 * still consume the full modrm + imm8 on both vendors.  The immediate bit
 * index is masked to the operand width (0x0f/0x1f/0x3f), so unlike the Gv
 * forms no negative/out-of-range bit offsets can reach the workers.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC(bt_Ev_Ib,  "bt  Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit write clears the high dword */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads; BTS/BTR/BTC (they have locked workers) read-modify-write. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 still to be fetched */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is only valid with a writing op */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 still to be fetched */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 still to be fetched */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9269
9270
/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386(); /* BTC requires a 386 or later. */
    /* Defer to the common bit-test worker, supplying the complement-bit implementation table. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
9278
9279
/**
 * Common worker for BSF and BSR instructions.
 *
 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
 * the destination register, which means that for 32-bit operations the high
 * bits must be left alone.
 *
 * The assembly worker sets ZF when the source is zero and leaves the
 * destination untouched in that case, which is why the 32-bit paths below only
 * clear the high dword of the destination when ZF ends up clear (i.e. when
 * something was actually written).
 *
 * @returns Strict VBox status code.
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint16_t,        u16Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint32_t,        u32Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* Only zero-extend to 64 bits when the destination was written (ZF clear);
                   a zero source leaves the destination - including its high dword - alone. */
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ENDIF();
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint64_t,        u64Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.  The source operand is fetched from memory,
         * the destination is always a register.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint16_t,        u16Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint32_t,        u32Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* Same ZF-gated zero extension as in the register form above. */
                IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ENDIF();
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint64_t,        u64Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9416
9417
/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386(); /* BSF requires a 386 or later. */
    /* Only ZF is architecturally defined for BSF; the rest vary by CPU model. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* Selects the BSF implementation matching the guest CPU's EFLAGS behavior. */
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
}
9426
9427
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
{
    /* Without BMI1 the F3 prefix is ignored and the encoding decodes as BSF. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
    IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Implementation tables: generic, AMD-flavored and Intel-flavored EFLAGS variants. */
    static const IEMOPBINSIZES s_iemAImpl_tzcnt =
    {   NULL, NULL,     iemAImpl_tzcnt_u16, NULL,       iemAImpl_tzcnt_u32, NULL,       iemAImpl_tzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
    {   NULL, NULL,     iemAImpl_tzcnt_u16_amd, NULL,   iemAImpl_tzcnt_u32_amd, NULL,   iemAImpl_tzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
    {   NULL, NULL,     iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
    /* First index: host BMI1 availability (see SELECT_EX below); second index: guest EFLAGS behavior. */
    static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
        { &s_iemAImpl_tzcnt,       &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}
9452
9453
/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386(); /* BSR requires a 386 or later. */
    /* Only ZF is architecturally defined for BSR; the rest vary by CPU model. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* Selects the BSR implementation matching the guest CPU's EFLAGS behavior. */
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
}
9462
9463
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
{
    /* Without BMI1 the F3 prefix is ignored and the encoding decodes as BSR.
       (NOTE(review): LZCNT is gated on ABM/fAbm on real AMD CPUs; fBmi1 is used
       here for both TZCNT and LZCNT - confirm this matches the CPUM feature model.) */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
    IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Implementation tables: generic, AMD-flavored and Intel-flavored EFLAGS variants. */
    static const IEMOPBINSIZES s_iemAImpl_lzcnt =
    {   NULL, NULL,     iemAImpl_lzcnt_u16, NULL,       iemAImpl_lzcnt_u32, NULL,       iemAImpl_lzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
    {   NULL, NULL,     iemAImpl_lzcnt_u16_amd, NULL,   iemAImpl_lzcnt_u32_amd, NULL,   iemAImpl_lzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
    {   NULL, NULL,     iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
    /* First index: host BMI1 availability (see SELECT_EX below); second index: guest EFLAGS behavior. */
    static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
        { &s_iemAImpl_lzcnt,       &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}
9488
9489
9490
/** Opcode 0x0f 0xbe - MOVSX Gv,Eb: sign-extend a byte into a 16/32/64-bit register. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* MOVSX requires a 386 or later. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9584
9585
/** Opcode 0x0f 0xbf - MOVSX Gv,Ew: sign-extend a word into a 32/64-bit register. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386(); /* MOVSX requires a 386 or later. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     * Only two result widths: 16-bit and 32-bit modes both store 32 bits.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
9654
9655
/** Opcode 0x0f 0xc0 - XADD Eb,Gb: exchange and add, byte form. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486(); /* XADD requires a 486 or later. */
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The register operand is copied to a local
         * so the assembly worker can swap it with the old memory value, and
         * the copy is written back to the register afterwards.
         */
        /* NOTE(review): no IEMOP_HLP_DONE_DECODING*() call before mapping,
           unlike other lockable r/m instructions in this file - confirm this
           is intentional. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,  pu8Dst,          0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,          1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t,  u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* Write the old memory value (swapped into the copy) back to the register. */
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
9714
9715
/** Opcode 0x0f 0xc1 - XADD Ev,Gv: exchange and add, word/dword/qword forms. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486(); /* XADD requires a 486 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* Both registers were written via 32-bit references, so both
                   high dwords must be explicitly zero-extended. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  The register operand is copied to a local
         * so the assembly worker can swap it with the old memory value, and
         * the copy is written back to the register after the commit.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t *, pu16Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t *, pu32Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* IEM_MC_STORE_GREG_U32 zero-extends, taking care of the high dword. */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t *, pu64Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9868
9869
/* SSE packed/scalar compare instructions - not implemented yet, stubbed. */
/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
9878
9879
/** Opcode 0x0f 0xc3 - MOVNTI My,Gy (SSE2 non-temporal integer store).
 *  IEM has no cache-hint machinery, so this is emulated as a plain store. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* SSE2 feature check only after decoding is complete. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* SSE2 feature check only after decoding is complete. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* Register destination form is #UD. */
    return VINF_SUCCESS;
}
9933/* Opcode 0x66 0x0f 0xc3 - invalid */
9934/* Opcode 0xf3 0x0f 0xc3 - invalid */
9935/* Opcode 0xf2 0x0f 0xc3 - invalid */
9936
/* PINSRW/PEXTRW (MMX and SSE forms) - not implemented yet, stubbed. */
/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
/*  Opcode 0xf3 0x0f 0xc4 - invalid */
/*  Opcode 0xf2 0x0f 0xc4 - invalid */

/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
/*  Opcode 0xf3 0x0f 0xc5 - invalid */
/*  Opcode 0xf2 0x0f 0xc5 - invalid */
9950
/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib
 *  The imm8 (bEvil) is the shuffle selector passed through to the worker. */
FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U,          pDst, 0);
        IEM_MC_ARG(PCRTUINT128U,         pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,        bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U,                 pDst,       0);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The imm8 follows the ModR/M + displacement bytes, so it is fetched
           only after the effective address has been calculated. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Aligned fetch: a misaligned 16-byte operand raises #GP. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10002
10003
/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib
 *  Same structure as shufps above, but SSE2 and the shufpd worker. */
FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U,          pDst, 0);
        IEM_MC_ARG(PCRTUINT128U,         pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,        bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U,                 pDst,       0);
        IEM_MC_LOCAL(RTUINT128U,                uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,      pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* The imm8 follows the ModR/M + displacement bytes, so it is fetched
           only after the effective address has been calculated. */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Aligned fetch: a misaligned 16-byte operand raises #GP. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bEvilArg);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10055
10056
10057/* Opcode 0xf3 0x0f 0xc6 - invalid */
10058/* Opcode 0xf2 0x0f 0xc6 - invalid */
10059
10060
/** Opcode 0x0f 0xc7 !11/1 - CMPXCHG8B Mq.
 *  Compares EDX:EAX with the 64-bit memory operand; on match stores ECX:EBX,
 *  otherwise loads the memory value into EDX:EAX (done below when ZF is clear). */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble EDX:EAX and ECX:EBX as 64-bit pairs for the worker. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) the worker put the old memory value into the pair;
       write it back to EDX:EAX. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10105
10106
/** Opcode REX.W 0x0f 0xc7 !11/1 - CMPXCHG16B Mdq.
 *  Like cmpxchg8b but on a 16-byte operand with RDX:RAX / RCX:RBX; requires
 *  the CMPXCHG16B CPUID feature and a 16-byte aligned operand (else \#GP). */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst,     0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx,     1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx,     2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* Assemble RDX:RAX and RCX:RBX as 128-bit pairs for the worker. */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
#  if defined(RT_ARCH_AMD64)
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
#  endif
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
#  if defined(RT_ARCH_AMD64)
        else
#  endif
# endif
# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 does
               multiple accesses and is not atomic at all, which works fine in a UNI
               CPU guest configuration (ignoring DMA).  If guest SMP is active we
               have no choice but to use a rendezvous callback here.  Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }
# endif

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* On mismatch (ZF clear) write the old memory value back to RDX:RAX. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
10186
/** Opcode 0x0f 0xc7 !11/1 - dispatches between cmpxchg8b and cmpxchg16b. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
{
    /* REX.W selects the 16-byte variant; without it this is cmpxchg8b. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
    return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
}
10193
/** Opcode 0x0f 0xc7 11/6 - rdrand Rv; not implemented yet, stubbed to raise \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
10196
10197/** Opcode 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
/** Decodes vmptrld Mq and defers the real work to iemCImpl_vmptrld. */
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld); /* must be in VMX operation */
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* 66h/F3h/F2h prefixes select other group-9 encodings, so reject them here. */
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
/* Without nested VMX support the instruction decodes to \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
#endif
10217
10218/** Opcode 0x66 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
/** Decodes vmclear Mq and defers the real work to iemCImpl_vmclear. */
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear); /* must be in VMX operation */
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
/* Without nested VMX support the instruction decodes to \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
#endif
10238
10239/** Opcode 0xf3 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
/** Decodes vmxon Mq and defers the real work to iemCImpl_vmxon.
 *  Note: no IEMOP_HLP_IN_VMX_OPERATION check - vmxon is the instruction that
 *  enters VMX operation in the first place. */
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmxon, "vmxon");
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
/* Without nested VMX support the instruction decodes to \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
#endif
10258
10259/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
/** Decodes vmptrst Mq and defers the real work to iemCImpl_vmptrst. */
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst); /* must be in VMX operation */
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    /* 66h/F3h/F2h prefixes select other group-9 encodings, so reject them here. */
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
/* Without nested VMX support the instruction decodes to \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
#endif
10279
/** Opcode 0x0f 0xc7 11/7 - rdseed Rv; not implemented yet, stubbed to raise \#UD. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
10282
10283
10284/**
10285 * Group 9 jump table for register variant.
10286 */
10287IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
10288{ /* pfx: none, 066h, 0f3h, 0f2h */
10289 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
10290 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
10291 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
10292 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
10293 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10294 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
10295 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10296 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10297};
10298AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
10299
10300
10301/**
10302 * Group 9 jump table for memory variant.
10303 */
10304IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
10305{ /* pfx: none, 066h, 0f3h, 0f2h */
10306 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
10307 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
10308 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
10309 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
10310 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10311 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
10312 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
10313 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10314};
10315AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
10316
10317
/** Opcode 0x0f 0xc7 - group 9 dispatcher.
 *  Selects the handler from the reg/mem jump tables above based on the
 *  ModR/M mod field, the /reg field and the active mandatory prefix. */
FNIEMOP_DEF(iemOp_Grp9)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                             + pVCpu->iem.s.idxPrefix], bRm);
}
10330
10331
10332/**
10333 * Common 'bswap register' helper.
10334 */
10335FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
10336{
10337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10338 switch (pVCpu->iem.s.enmEffOpSize)
10339 {
10340 case IEMMODE_16BIT:
10341 IEM_MC_BEGIN(1, 0);
10342 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10343 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
10344 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
10345 IEM_MC_ADVANCE_RIP();
10346 IEM_MC_END();
10347 return VINF_SUCCESS;
10348
10349 case IEMMODE_32BIT:
10350 IEM_MC_BEGIN(1, 0);
10351 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10352 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
10353 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10354 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
10355 IEM_MC_ADVANCE_RIP();
10356 IEM_MC_END();
10357 return VINF_SUCCESS;
10358
10359 case IEMMODE_64BIT:
10360 IEM_MC_BEGIN(1, 0);
10361 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10362 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
10363 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
10364 IEM_MC_ADVANCE_RIP();
10365 IEM_MC_END();
10366 return VINF_SUCCESS;
10367
10368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10369 }
10370}
10371
10372
/** Opcode 0x0f 0xc8 - bswap rAX/r8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix. REX.B is the correct prefix it appears. For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486(); /* bswap was introduced with the 80486 */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
10383
10384
/** Opcode 0x0f 0xc9 - bswap rCX/r9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486(); /* bswap was introduced with the 80486 */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
10392
10393
10394/** Opcode 0x0f 0xca. */
10395FNIEMOP_DEF(iemOp_bswap_rDX_r10)
10396{
10397 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
10398 IEMOP_HLP_MIN_486();
10399 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
10400}
10401
10402
10403/** Opcode 0x0f 0xcb. */
10404FNIEMOP_DEF(iemOp_bswap_rBX_r11)
10405{
10406 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
10407 IEMOP_HLP_MIN_486();
10408 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
10409}
10410
10411
/** Opcode 0x0f 0xcc - bswap rSP/r12. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486(); /* bswap was introduced with the 80486 */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
10419
10420
/** Opcode 0x0f 0xcd - bswap rBP/r13. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486(); /* bswap was introduced with the 80486 */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
10428
10429
/** Opcode 0x0f 0xce - bswap rSI/r14. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486(); /* bswap was introduced with the 80486 */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
10437
10438
/** Opcode 0x0f 0xcf - bswap rDI/r15. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486(); /* bswap was introduced with the 80486 */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
10446
10447
10448/* Opcode 0x0f 0xd0 - invalid */
10449
10450
/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd (SSE3). */
FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    /* Decoding, exception checks and operand fetching happen in the common SSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
}
10457
10458
10459/* Opcode 0xf3 0x0f 0xd0 - invalid */
10460
10461
/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps (SSE3). */
FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* Decoding, exception checks and operand fetching happen in the common SSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
}
10468
10469
10470
10471/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
10472FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
10473{
10474 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10475 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
10476}
10477
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
}
10484
10485/* Opcode 0xf3 0x0f 0xd1 - invalid */
10486/* Opcode 0xf2 0x0f 0xd1 - invalid */
10487
/** Opcode 0x0f 0xd2 - psrld Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    /* Decoding and operand handling happen in the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
}
10494
10495
/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
}
10502
10503
10504/* Opcode 0xf3 0x0f 0xd2 - invalid */
10505/* Opcode 0xf2 0x0f 0xd2 - invalid */
10506
/** Opcode 0x0f 0xd3 - psrlq Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
}
10513
10514
/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
}
10521
10522
10523/* Opcode 0xf3 0x0f 0xd3 - invalid */
10524/* Opcode 0xf2 0x0f 0xd3 - invalid */
10525
10526
/** Opcode 0x0f 0xd4 - paddq Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* The MMX paddq encoding is gated on the SSE2 CPUID feature, hence the _Ex worker. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}
10533
10534
/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
}
10541
10542
10543/* Opcode 0xf3 0x0f 0xd4 - invalid */
10544/* Opcode 0xf2 0x0f 0xd4 - invalid */
10545
/** Opcode 0x0f 0xd5 - pmullw Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
}
10552
/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
}
10559
10560
10561/* Opcode 0xf3 0x0f 0xd5 - invalid */
10562/* Opcode 0xf2 0x0f 0xd5 - invalid */
10563
10564/* Opcode 0x0f 0xd6 - invalid */
10565
10566/**
10567 * @opcode 0xd6
10568 * @oppfx 0x66
10569 * @opcpuid sse2
10570 * @opgroup og_sse2_pcksclr_datamove
10571 * @opxcpttype none
10572 * @optest op1=-1 op2=2 -> op1=2
10573 * @optest op1=0 op2=-42 -> op1=-42
10574 */
10575FNIEMOP_DEF(iemOp_movq_Wq_Vq)
10576{
10577 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10579 if (IEM_IS_MODRM_REG_MODE(bRm))
10580 {
10581 /*
10582 * Register, register.
10583 */
10584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10585 IEM_MC_BEGIN(0, 2);
10586 IEM_MC_LOCAL(uint64_t, uSrc);
10587
10588 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10589 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
10590
10591 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10592 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
10593
10594 IEM_MC_ADVANCE_RIP();
10595 IEM_MC_END();
10596 }
10597 else
10598 {
10599 /*
10600 * Memory, register.
10601 */
10602 IEM_MC_BEGIN(0, 2);
10603 IEM_MC_LOCAL(uint64_t, uSrc);
10604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10605
10606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10608 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10609 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10610
10611 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10612 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10613
10614 IEM_MC_ADVANCE_RIP();
10615 IEM_MC_END();
10616 }
10617 return VINF_SUCCESS;
10618}
10619
10620
10621/**
10622 * @opcode 0xd6
10623 * @opcodesub 11 mr/reg
10624 * @oppfx f3
10625 * @opcpuid sse2
10626 * @opgroup og_sse2_simdint_datamove
10627 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
10628 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10629 */
10630FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
10631{
10632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10633 if (IEM_IS_MODRM_REG_MODE(bRm))
10634 {
10635 /*
10636 * Register, register.
10637 */
10638 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10640 IEM_MC_BEGIN(0, 1);
10641 IEM_MC_LOCAL(uint64_t, uSrc);
10642
10643 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10644 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10645
10646 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
10647 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
10648 IEM_MC_FPU_TO_MMX_MODE();
10649
10650 IEM_MC_ADVANCE_RIP();
10651 IEM_MC_END();
10652 return VINF_SUCCESS;
10653 }
10654
10655 /**
10656 * @opdone
10657 * @opmnemonic udf30fd6mem
10658 * @opcode 0xd6
10659 * @opcodesub !11 mr/reg
10660 * @oppfx f3
10661 * @opunused intel-modrm
10662 * @opcpuid sse
10663 * @optest ->
10664 */
10665 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
10666}
10667
10668
10669/**
10670 * @opcode 0xd6
10671 * @opcodesub 11 mr/reg
10672 * @oppfx f2
10673 * @opcpuid sse2
10674 * @opgroup og_sse2_simdint_datamove
10675 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
10676 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10677 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
10678 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
10679 * @optest op1=-42 op2=0xfedcba9876543210
10680 * -> op1=0xfedcba9876543210 ftw=0xff
10681 */
10682FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
10683{
10684 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10685 if (IEM_IS_MODRM_REG_MODE(bRm))
10686 {
10687 /*
10688 * Register, register.
10689 */
10690 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10692 IEM_MC_BEGIN(0, 1);
10693 IEM_MC_LOCAL(uint64_t, uSrc);
10694
10695 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10696 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10697
10698 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
10699 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
10700 IEM_MC_FPU_TO_MMX_MODE();
10701
10702 IEM_MC_ADVANCE_RIP();
10703 IEM_MC_END();
10704 return VINF_SUCCESS;
10705 }
10706
10707 /**
10708 * @opdone
10709 * @opmnemonic udf20fd6mem
10710 * @opcode 0xd6
10711 * @opcodesub !11 mr/reg
10712 * @oppfx f2
10713 * @opunused intel-modrm
10714 * @opcpuid sse
10715 * @optest ->
10716 */
10717 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
10718}
10719
10720
/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq (MMX form, register operands only). */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        /* pmovmskb with an MMX source requires SSE or the AMD MMX extensions. */
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
        IEM_MC_FPU_TO_MMX_MODE(); /* reading an MMX register switches the FPU into MMX mode */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /* Memory form is not defined for this instruction. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
10746
10747
/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux (SSE2 form, register operands only). */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /* Memory form is not defined for this instruction. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
10772
10773
10774/* Opcode 0xf3 0x0f 0xd7 - invalid */
10775/* Opcode 0xf2 0x0f 0xd7 - invalid */
10776
10777
/** Opcode 0x0f 0xd8 - psubusb Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
}
10784
10785
/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
}
10792
10793
10794/* Opcode 0xf3 0x0f 0xd8 - invalid */
10795/* Opcode 0xf2 0x0f 0xd8 - invalid */
10796
/** Opcode 0x0f 0xd9 - psubusw Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
}
10803
10804
/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
}
10811
10812
10813/* Opcode 0xf3 0x0f 0xd9 - invalid */
10814/* Opcode 0xf2 0x0f 0xd9 - invalid */
10815
/** Opcode 0x0f 0xda - pminub Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX pminub needs SSE or AMD MMX extensions; the MmxSse worker checks that. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
}
10822
10823
/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
}
10830
10831/* Opcode 0xf3 0x0f 0xda - invalid */
10832/* Opcode 0xf2 0x0f 0xda - invalid */
10833
/** Opcode 0x0f 0xdb - pand Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
}
10840
10841
/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_pand_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}
10848
10849
10850/* Opcode 0xf3 0x0f 0xdb - invalid */
10851/* Opcode 0xf2 0x0f 0xdb - invalid */
10852
/** Opcode 0x0f 0xdc - paddusb Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
}
10859
10860
/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
}
10867
10868
10869/* Opcode 0xf3 0x0f 0xdc - invalid */
10870/* Opcode 0xf2 0x0f 0xdc - invalid */
10871
/** Opcode 0x0f 0xdd - paddusw Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
}
10878
10879
/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
}
10886
10887
10888/* Opcode 0xf3 0x0f 0xdd - invalid */
10889/* Opcode 0xf2 0x0f 0xdd - invalid */
10890
/** Opcode 0x0f 0xde - pmaxub Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX pmaxub needs SSE or AMD MMX extensions; the MmxSse worker checks that. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
}
10897
10898
/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
}
10905
10906/* Opcode 0xf3 0x0f 0xde - invalid */
10907/* Opcode 0xf2 0x0f 0xde - invalid */
10908
10909
/** Opcode 0x0f 0xdf - pandn Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
}
10916
10917
/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}
10924
10925
10926/* Opcode 0xf3 0x0f 0xdf - invalid */
10927/* Opcode 0xf2 0x0f 0xdf - invalid */
10928
/** Opcode 0x0f 0xe0 - pavgb Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX pavgb needs SSE or AMD MMX extensions; the MmxSseOpt worker checks that. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
}
10935
10936
/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
}
10943
10944
10945/* Opcode 0xf3 0x0f 0xe0 - invalid */
10946/* Opcode 0xf2 0x0f 0xe0 - invalid */
10947
/** Opcode 0x0f 0xe1 - psraw Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
}
10954
10955
/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
}
10962
10963
10964/* Opcode 0xf3 0x0f 0xe1 - invalid */
10965/* Opcode 0xf2 0x0f 0xe1 - invalid */
10966
/** Opcode 0x0f 0xe2 - psrad Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
}
10973
10974
/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
}
10981
10982
10983/* Opcode 0xf3 0x0f 0xe2 - invalid */
10984/* Opcode 0xf2 0x0f 0xe2 - invalid */
10985
/** Opcode 0x0f 0xe3 - pavgw Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX pavgw needs SSE or AMD MMX extensions; the MmxSseOpt worker checks that. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
}
10992
10993
/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
}
11000
11001
11002/* Opcode 0xf3 0x0f 0xe3 - invalid */
11003/* Opcode 0xf2 0x0f 0xe3 - invalid */
11004
/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX pmulhuw needs SSE or AMD MMX extensions; the MmxSseOpt worker checks that. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
}
11011
11012
/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
}
11019
11020
11021/* Opcode 0xf3 0x0f 0xe4 - invalid */
11022/* Opcode 0xf2 0x0f 0xe4 - invalid */
11023
/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq (MMX form). */
FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common MMX worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
}
11030
11031
/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx (SSE2 form). */
FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Decoding and operand handling happen in the common SSE2 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
}
11038
11039
11040/* Opcode 0xf3 0x0f 0xe5 - invalid */
11041/* Opcode 0xf2 0x0f 0xe5 - invalid */
11042
11043/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
11050
11051
11052/**
11053 * @opcode 0xe7
11054 * @opcodesub !11 mr/reg
11055 * @oppfx none
11056 * @opcpuid sse
11057 * @opgroup og_sse1_cachect
11058 * @opxcpttype none
11059 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
11060 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
11061 */
11062FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
11063{
11064 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11066 if (IEM_IS_MODRM_MEM_MODE(bRm))
11067 {
11068 /* Register, memory. */
11069 IEM_MC_BEGIN(0, 2);
11070 IEM_MC_LOCAL(uint64_t, uSrc);
11071 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11072
11073 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11075 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
11076 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
11077
11078 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
11079 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
11080 IEM_MC_FPU_TO_MMX_MODE();
11081
11082 IEM_MC_ADVANCE_RIP();
11083 IEM_MC_END();
11084 return VINF_SUCCESS;
11085 }
11086 /**
11087 * @opdone
11088 * @opmnemonic ud0fe7reg
11089 * @opcode 0xe7
11090 * @opcodesub 11 mr/reg
11091 * @oppfx none
11092 * @opunused immediate
11093 * @opcpuid sse
11094 * @optest ->
11095 */
11096 return IEMOP_RAISE_INVALID_OPCODE();
11097}
11098
11099/**
11100 * @opcode 0xe7
11101 * @opcodesub !11 mr/reg
11102 * @oppfx 0x66
11103 * @opcpuid sse2
11104 * @opgroup og_sse2_cachect
11105 * @opxcpttype 1
11106 * @optest op1=-1 op2=2 -> op1=2
11107 * @optest op1=0 op2=-42 -> op1=-42
11108 */
11109FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
11110{
11111 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
11112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11113 if (IEM_IS_MODRM_MEM_MODE(bRm))
11114 {
11115 /* Register, memory. */
11116 IEM_MC_BEGIN(0, 2);
11117 IEM_MC_LOCAL(RTUINT128U, uSrc);
11118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11119
11120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11122 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
11123 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
11124
11125 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
11126 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
11127
11128 IEM_MC_ADVANCE_RIP();
11129 IEM_MC_END();
11130 return VINF_SUCCESS;
11131 }
11132
11133 /**
11134 * @opdone
11135 * @opmnemonic ud660fe7reg
11136 * @opcode 0xe7
11137 * @opcodesub 11 mr/reg
11138 * @oppfx 0x66
11139 * @opunused immediate
11140 * @opcpuid sse
11141 * @optest ->
11142 */
11143 return IEMOP_RAISE_INVALID_OPCODE();
11144}
11145
11146/* Opcode 0xf3 0x0f 0xe7 - invalid */
11147/* Opcode 0xf2 0x0f 0xe7 - invalid */
11148
11149
/** Opcode 0x0f 0xe8 - psubsb Pq, Qq
 * MMX: subtract packed signed bytes with signed saturation. */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
}


/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx
 * SSE2: subtract packed signed bytes with signed saturation. */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
}
11164
11165
11166/* Opcode 0xf3 0x0f 0xe8 - invalid */
11167/* Opcode 0xf2 0x0f 0xe8 - invalid */
11168
/** Opcode 0x0f 0xe9 - psubsw Pq, Qq
 * MMX: subtract packed signed words with signed saturation. */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx
 * SSE2: subtract packed signed words with signed saturation. */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
}
11183
11184
11185/* Opcode 0xf3 0x0f 0xe9 - invalid */
11186/* Opcode 0xf2 0x0f 0xe9 - invalid */
11187
11188
/** Opcode 0x0f 0xea - pminsw Pq, Qq
 * Packed signed word minimum.  The MMX form was added by SSE (and AMD's MMX
 * extensions), hence the MmxSse worker rather than the plain Mmx one. */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx
 * SSE2: packed signed word minimum. */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
}
11203
11204
11205/* Opcode 0xf3 0x0f 0xea - invalid */
11206/* Opcode 0xf2 0x0f 0xea - invalid */
11207
11208
/** Opcode 0x0f 0xeb - por Pq, Qq
 * MMX: bitwise OR of the full 64-bit registers. */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx
 * SSE2: bitwise OR of the full 128-bit registers. */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}
11223
11224
11225/* Opcode 0xf3 0x0f 0xeb - invalid */
11226/* Opcode 0xf2 0x0f 0xeb - invalid */
11227
/** Opcode 0x0f 0xec - paddsb Pq, Qq
 * MMX: add packed signed bytes with signed saturation. */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx
 * SSE2: add packed signed bytes with signed saturation. */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
}
11242
11243
11244/* Opcode 0xf3 0x0f 0xec - invalid */
11245/* Opcode 0xf2 0x0f 0xec - invalid */
11246
/** Opcode 0x0f 0xed - paddsw Pq, Qq
 * MMX: add packed signed words with signed saturation. */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx
 * SSE2: add packed signed words with signed saturation. */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
}
11261
11262
11263/* Opcode 0xf3 0x0f 0xed - invalid */
11264/* Opcode 0xf2 0x0f 0xed - invalid */
11265
11266
/** Opcode 0x0f 0xee - pmaxsw Pq, Qq
 * Packed signed word maximum.  The MMX form was added by SSE (and AMD's MMX
 * extensions), hence the MmxSse worker rather than the plain Mmx one. */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx
 * SSE2: packed signed word maximum. */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}
11281
11282
11283/* Opcode 0xf3 0x0f 0xee - invalid */
11284/* Opcode 0xf2 0x0f 0xee - invalid */
11285
11286
/** Opcode 0x0f 0xef - pxor Pq, Qq
 * MMX: bitwise XOR of the full 64-bit registers. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx
 * SSE2: bitwise XOR of the full 128-bit registers. */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}
11301
11302
11303/* Opcode 0xf3 0x0f 0xef - invalid */
11304/* Opcode 0xf2 0x0f 0xef - invalid */
11305
11306/* Opcode 0x0f 0xf0 - invalid */
11307/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx (SSE3; not implemented yet). */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
11310
11311
/** Opcode 0x0f 0xf1 - psllw Pq, Qq
 * MMX: shift packed words left by the count in Qq. */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx
 * SSE2: shift packed words left by the count in Wx. */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}
11326
11327
11328/* Opcode 0xf2 0x0f 0xf1 - invalid */
11329
/** Opcode 0x0f 0xf2 - pslld Pq, Qq
 * MMX: shift packed dwords left by the count in Qq. */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx
 * SSE2: shift packed dwords left by the count in Wx. */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}
11344
11345
11346/* Opcode 0xf2 0x0f 0xf2 - invalid */
11347
/** Opcode 0x0f 0xf3 - psllq Pq, Qq
 * MMX: shift the packed qword left by the count in Qq. */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx
 * SSE2: shift packed qwords left by the count in Wx. */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}
11362
11363/* Opcode 0xf2 0x0f 0xf3 - invalid */
11364
11365/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
11366FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
11367{
11368 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11369 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
11370}
11371
11372
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx
 * SSE2: unsigned 32x32->64 multiply of the low dword of each qword lane. */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
}
11379
11380
11381/* Opcode 0xf2 0x0f 0xf4 - invalid */
11382
/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq
 * MMX: multiply packed signed words and add the adjacent dword products. */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx
 * SSE2: multiply packed signed words and add the adjacent dword products. */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}
11397
11398/* Opcode 0xf2 0x0f 0xf5 - invalid */
11399
/** Opcode 0x0f 0xf6 - psadbw Pq, Qq
 * Sum of absolute byte differences.  The MMX form was added by SSE (and AMD's
 * MMX extensions), hence the MmxSseOpt worker rather than the plain MmxOpt one. */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx
 * SSE2: sum of absolute byte differences per qword lane. */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}
11414
11415
11416/* Opcode 0xf2 0x0f 0xf6 - invalid */
11417
/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq (byte-masked store; not implemented yet). */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq (byte-masked store; not implemented yet). */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
11422/* Opcode 0xf2 0x0f 0xf7 - invalid */
11423
11424
/** Opcode 0x0f 0xf8 - psubb Pq, Qq
 * MMX: subtract packed bytes (wrap-around, no saturation). */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx
 * SSE2: subtract packed bytes (wrap-around, no saturation). */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}
11439
11440
11441/* Opcode 0xf2 0x0f 0xf8 - invalid */
11442
11443
/** Opcode 0x0f 0xf9 - psubw Pq, Qq
 * MMX: subtract packed words (wrap-around, no saturation). */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx
 * SSE2: subtract packed words (wrap-around, no saturation). */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}
11458
11459
11460/* Opcode 0xf2 0x0f 0xf9 - invalid */
11461
11462
/** Opcode 0x0f 0xfa - psubd Pq, Qq
 * MMX: subtract packed dwords (wrap-around, no saturation). */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx
 * SSE2: subtract packed dwords (wrap-around, no saturation). */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}
11477
11478
11479/* Opcode 0xf2 0x0f 0xfa - invalid */
11480
11481
/** Opcode 0x0f 0xfb - psubq Pq, Qq
 * MMX encoding of PSUBQ (qword subtract).  This encoding was introduced with
 * SSE2, so the _Ex worker gates it on the guest's SSE2 CPUID feature. */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx
 * SSE2: subtract packed qwords. */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}
11496
11497
11498/* Opcode 0xf2 0x0f 0xfb - invalid */
11499
11500
/** Opcode 0x0f 0xfc - paddb Pq, Qq
 * MMX: add packed bytes (wrap-around, no saturation). */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx
 * SSE2: add packed bytes (wrap-around, no saturation). */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}
11515
11516
11517/* Opcode 0xf2 0x0f 0xfc - invalid */
11518
11519
/** Opcode 0x0f 0xfd - paddw Pq, Qq
 * MMX: add packed words (wrap-around, no saturation). */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx
 * SSE2: add packed words (wrap-around, no saturation). */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}
11534
11535
11536/* Opcode 0xf2 0x0f 0xfd - invalid */
11537
11538
/** Opcode 0x0f 0xfe - paddd Pq, Qq
 * MMX: add packed dwords (wrap-around, no saturation). */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx
 * SSE2: add packed dwords (wrap-around, no saturation). */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}
11553
11554
11555/* Opcode 0xf2 0x0f 0xfe - invalid */
11556
11557
/** Opcode **** 0x0f 0xff - UD0
 * Guaranteed-undefined opcode.  On Intel the CPU also consumes a ModR/M byte
 * (and its memory-operand bytes) before raising \#UD, so the emulation mirrors
 * that vendor-specific decode behaviour; on other vendors it faults right away. */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        /* Intel: eat the ModR/M byte and decode any effective address it implies. */
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    /* Always ends in an invalid-opcode exception. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
11578
11579
11580
11581/**
11582 * Two byte opcode map, first byte 0x0f.
11583 *
11584 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
11585 * check if it needs updating as well when making changes.
11586 */
11587IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
11588{
11589 /* no prefix, 066h prefix f3h prefix, f2h prefix */
11590 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
11591 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
11592 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
11593 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
11594 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
11595 /* 0x05 */ IEMOP_X4(iemOp_syscall),
11596 /* 0x06 */ IEMOP_X4(iemOp_clts),
11597 /* 0x07 */ IEMOP_X4(iemOp_sysret),
11598 /* 0x08 */ IEMOP_X4(iemOp_invd),
11599 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
11600 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
11601 /* 0x0b */ IEMOP_X4(iemOp_ud2),
11602 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
11603 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
11604 /* 0x0e */ IEMOP_X4(iemOp_femms),
11605 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
11606
11607 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
11608 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
11609 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
11610 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11611 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11612 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11613 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
11614 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11615 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
11616 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
11617 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
11618 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
11619 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
11620 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
11621 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
11622 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
11623
11624 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
11625 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
11626 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
11627 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
11628 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
11629 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
11630 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
11631 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
11632 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11633 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11634 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
11635 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11636 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
11637 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
11638 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11639 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11640
11641 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
11642 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
11643 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
11644 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
11645 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
11646 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
11647 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
11648 /* 0x37 */ IEMOP_X4(iemOp_getsec),
11649 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
11650 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
11651 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
11652 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
11653 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
11654 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
11655 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
11656 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
11657
11658 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
11659 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
11660 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
11661 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
11662 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
11663 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
11664 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
11665 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
11666 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
11667 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
11668 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
11669 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
11670 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
11671 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
11672 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
11673 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
11674
11675 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11676 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
11677 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
11678 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
11679 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11680 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11681 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11682 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11683 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
11684 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
11685 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
11686 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
11687 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
11688 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
11689 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
11690 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
11691
11692 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11693 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11694 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11695 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11696 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11697 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11698 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11699 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11700 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11701 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11702 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11703 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11704 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11705 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11706 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11707 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
11708
11709 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
11710 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
11711 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
11712 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
11713 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11714 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11715 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11716 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11717
11718 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11719 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11720 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11721 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11722 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
11723 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
11724 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
11725 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
11726
11727 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
11728 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
11729 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
11730 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
11731 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
11732 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
11733 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
11734 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
11735 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
11736 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
11737 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
11738 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
11739 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
11740 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
11741 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
11742 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
11743
11744 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
11745 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
11746 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
11747 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
11748 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
11749 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
11750 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
11751 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
11752 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
11753 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
11754 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
11755 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
11756 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
11757 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
11758 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
11759 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
11760
11761 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
11762 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
11763 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
11764 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
11765 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
11766 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
11767 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
11768 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
11769 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
11770 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
11771 /* 0xaa */ IEMOP_X4(iemOp_rsm),
11772 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
11773 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
11774 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
11775 /* 0xae */ IEMOP_X4(iemOp_Grp15),
11776 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
11777
11778 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
11779 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
11780 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
11781 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
11782 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
11783 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
11784 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
11785 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
11786 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
11787 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
11788 /* 0xba */ IEMOP_X4(iemOp_Grp8),
11789 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
11790 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
11791 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
11792 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
11793 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
11794
11795 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
11796 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
11797 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
11798 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11799 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
11800 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
11801 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
11802 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
11803 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
11804 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
11805 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
11806 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
11807 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
11808 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
11809 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
11810 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
11811
11812 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
11813 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11814 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11815 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11816 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11817 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11818 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
11819 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11820 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11821 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11822 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11823 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11824 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11825 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11826 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11827 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11828
11829 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11830 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11831 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11832 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11833 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11834 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11835 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
11836 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11837 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11838 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11839 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11840 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11841 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11842 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11843 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11844 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11845
11846 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
11847 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11848 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11849 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11850 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11851 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11852 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11853 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11854 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11855 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11856 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11857 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11858 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11859 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11860 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11861 /* 0xff */ IEMOP_X4(iemOp_ud0),
11862};
/* Compile-time guard: the table must hold exactly 256 opcodes x 4 entries each
   (the four columns per opcode row visible above; presumably one per operand-size/
   repeat-prefix variant: none, 0x66, 0xF3, 0xF2 — confirm against the decoder). */
11863AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
11864
11865/** @} */
11866
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette