VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@ 96326

Last change on this file since 96326 was 96326, checked in by vboxsync, 2 years ago

VMM/IEM: addpd, mulpd and subpd are available with SSE2+ only, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 382.4 KB
Line 
1/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96326 2022-08-19 08:51:02Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Two byte opcodes (first byte 0x0f).
23 *
24 * @{
25 */
26
27
28/**
29 * Common worker for MMX instructions on the form:
30 * pxxx mm1, mm2/mem64
31 */
32FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
33{
34 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
35 if (IEM_IS_MODRM_REG_MODE(bRm))
36 {
37 /*
38 * Register, register.
39 */
40 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
41 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
42 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
43 IEM_MC_BEGIN(2, 0);
44 IEM_MC_ARG(uint64_t *, pDst, 0);
45 IEM_MC_ARG(uint64_t const *, pSrc, 1);
46 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
47 IEM_MC_PREPARE_FPU_USAGE();
48 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
49 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
50 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
51 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
52 IEM_MC_FPU_TO_MMX_MODE();
53 IEM_MC_ADVANCE_RIP();
54 IEM_MC_END();
55 }
56 else
57 {
58 /*
59 * Register, memory.
60 */
61 IEM_MC_BEGIN(2, 2);
62 IEM_MC_ARG(uint64_t *, pDst, 0);
63 IEM_MC_LOCAL(uint64_t, uSrc);
64 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
65 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
66
67 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
68 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
69 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
70 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
71
72 IEM_MC_PREPARE_FPU_USAGE();
73 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
74 IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
75 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
76 IEM_MC_FPU_TO_MMX_MODE();
77
78 IEM_MC_ADVANCE_RIP();
79 IEM_MC_END();
80 }
81 return VINF_SUCCESS;
82}
83
84
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @param   pfnU64  The instruction emulation worker; called with just the
 *                  destination and (const) source operands (no FPU state).
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();  /* MMX availability/state checks; may raise an exception. */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);   /* Plain call: worker needs no FPU state pointer. */
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* Fetch the memory operand before taking any register references. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
143
144
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Same shape as iemOpCommonMmx_FullFull_To_Full, but the exception check also
 * requires SSE or the MMX extensions (see the _CHECK_SSE_OR_MMXEXT variant).
 *
 * @param   pfnU64  The instruction emulation worker; called with the
 *                  destination and (const) source 64-bit MMX register values.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();  /* Also requires SSE or MMXEXT. */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        /* Fetch the memory operand before taking any register references. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
201
202
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @param   pfnU64  The instruction emulation worker; called with just the
 *                  destination and (const) source operands (no FPU state).
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();  /* Also requires SSE or MMXEXT. */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);   /* Plain call: worker needs no FPU state pointer. */
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        /* Fetch the memory operand before taking any register references. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
262
263
/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
 *
 * @param   pfnU64      The instruction emulation worker; called with the
 *                      destination and (const) source 64-bit MMX register values.
 * @param   fSupported  Whether the instruction is supported by the current CPU
 *                      profile; fed to the _EX exception-check variant so the
 *                      instruction faults when not available.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);     /* Checks feature support flag too. */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        /* Fetch the memory operand before taking any register references. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
320
321
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @param   pfnU128     The instruction emulation worker; called with the
 *                      destination and (const) source 128-bit XMM registers.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();     /* SSE2 availability/state checks; may raise an exception. */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Aligned 128-bit fetch; enforces the SSE alignment requirement. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
376
377
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @param   pfnU128     The instruction emulation worker; called with just the
 *                      destination and (const) source operands (no SSE state).
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();     /* SSE2 availability/state checks; may raise an exception. */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);  /* Plain call: worker needs no SSE state pointer. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Aligned 128-bit fetch; enforces the SSE alignment requirement. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
435
436
/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 *
 * @param   pfnU64  The instruction emulation worker; called with just the
 *                  destination and (const) source operands (no FPU state).
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();  /* MMX availability/state checks; may raise an exception. */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* Only 32 bits are read from memory; zero-extended into the 64-bit local. */
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
493
494
/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned, 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 *
 * @param   pfnU64  The instruction emulation worker; called with just the
 *                  destination and (const) source operands (no SSE state).
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();  /* SSE availability/state checks; may raise an exception. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
554
555
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned, 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 *
 * @param   pfnU128     The instruction emulation worker; called with just the
 *                      destination and (const) source operands (no SSE state).
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();     /* SSE2 availability/state checks; may raise an exception. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
615
616
/**
 * Common worker for MMX instructions on the form:
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 *
 * @param   pfnU64  The instruction emulation worker; called with just the
 *                  destination and (const) source operands (no FPU state).
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();  /* MMX availability/state checks; may raise an exception. */
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
675
676
/**
 * Common worker for SSE instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 *
 * @param   pfnU128     The instruction emulation worker; called with just the
 *                      destination and (const) source operands (no SSE state).
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();  /* SSE availability/state checks; may raise an exception. */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
736
737
/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @param   pfnU128     The floating-point emulation worker; called with a
 *                      result structure and the two (const) source XMM values.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);     /* Worker writes result + MXCSR flags here. */
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();  /* SSE availability/state checks; may raise an exception. */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();    /* Raise SIMD FP exception after storing the result. */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /* Aligned 128-bit fetch; enforces the SSE alignment requirement. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
801
802
/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @param   pfnU128     The floating-point emulation worker; called with a
 *                      result structure and the two (const) source XMM values.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);     /* Worker writes result + MXCSR flags here. */
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /* SSE2 availability/state checks; may raise an exception. */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();    /* Raise SIMD FP exception after storing the result. */

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Aligned 128-bit fetch; enforces the SSE alignment requirement. */
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
866
867
/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 *
 * @param   pfnU128     The instruction emulation worker; called with just the
 *                      destination and (const) source operands (no SSE state).
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();     /* SSE2 availability/state checks; may raise an exception. */
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually does and whether it will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
927
928
/** Opcode 0x0f 0x00 /0. (SLDT - store local descriptor table register.) */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();                /* Instruction requires a 286 or later. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* Not valid in real or V8086 mode. */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: defer to the C implementation with the effective operand size. */
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
953
954
/** Opcode 0x0f 0x00 /1. (STR - store task register.) */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();                /* Instruction requires a 286 or later. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* Not valid in real or V8086 mode. */


    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: defer to the C implementation with the effective operand size. */
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
980
981
/** Opcode 0x0f 0x00 /2. LLDT - load the local descriptor table register.
 * Fetches a 16-bit selector from a register or memory and defers the
 * privileged work (CPL/descriptor checks) to iemCImpl_lldt. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1012
1013
/** Opcode 0x0f 0x00 /3. LTR - load the task register.
 * Same shape as LLDT: fetch a 16-bit selector (register or memory) and defer
 * the privileged checks and TR load to iemCImpl_ltr. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1044
1045
1046/** Opcode 0x0f 0x00 /3. */
1047FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
1048{
1049 IEMOP_HLP_MIN_286();
1050 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1051
1052 if (IEM_IS_MODRM_REG_MODE(bRm))
1053 {
1054 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1055 IEM_MC_BEGIN(2, 0);
1056 IEM_MC_ARG(uint16_t, u16Sel, 0);
1057 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1058 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1059 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
1060 IEM_MC_END();
1061 }
1062 else
1063 {
1064 IEM_MC_BEGIN(2, 1);
1065 IEM_MC_ARG(uint16_t, u16Sel, 0);
1066 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
1067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1069 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1070 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1071 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
1072 IEM_MC_END();
1073 }
1074 return VINF_SUCCESS;
1075}
1076
1077
/** Opcode 0x0f 0x00 /4. VERR - verify a segment for reading (fWrite=false). */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
1085
1086
/** Opcode 0x0f 0x00 /5. VERW - verify a segment for writing (fWrite=true). */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}
1094
1095
/**
 * Group 6 jump table, indexed by the mod r/m reg field (/0../7).
 * Entries /6 and /7 are undefined opcodes.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,        /* /0 */
    iemOp_Grp6_str,         /* /1 */
    iemOp_Grp6_lldt,        /* /2 */
    iemOp_Grp6_ltr,         /* /3 */
    iemOp_Grp6_verr,        /* /4 */
    iemOp_Grp6_verw,        /* /5 */
    iemOp_InvalidWithRM,    /* /6 */
    iemOp_InvalidWithRM     /* /7 */
};
1110
/** Opcode 0x0f 0x00. Group 6 dispatcher: reads the mod r/m byte and jumps
 * via g_apfnGroup6 on its reg field. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
1117
1118
/** Opcode 0x0f 0x01 /0. SGDT - store the GDTR to memory (memory form only;
 * the register encodings of /0 are the VMX instructions below). */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1135
1136
/** Opcode 0x0f 0x01 /0, rm=1. VMCALL - deferred unconditionally so GIM can
 * service hypercalls even when VMX is not exposed to the guest. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}
1149
1150
/** Opcode 0x0f 0x01 /0, rm=2. VMLAUNCH - only implemented with nested VMX
 * support; otherwise a stub raising \#UD. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1168
1169
/** Opcode 0x0f 0x01 /0, rm=3. VMRESUME - only implemented with nested VMX
 * support; otherwise a stub raising \#UD. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1187
1188
/** Opcode 0x0f 0x01 /0, rm=4. VMXOFF - only implemented with nested VMX
 * support; otherwise a stub raising \#UD. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif
1206
1207
/** Opcode 0x0f 0x01 /1. SIDT - store the IDTR to memory (memory form only;
 * the register encodings of /1 are monitor/mwait). */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1224
1225
/** Opcode 0x0f 0x01 /1, rm=0. MONITOR - the effective segment is passed so
 * the C implementation can resolve the RAX-based linear address. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
1233
1234
/** Opcode 0x0f 0x01 /1, rm=1. MWAIT - deferred straight to the C worker. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
1242
1243
/** Opcode 0x0f 0x01 /2. LGDT - load the GDTR from memory; the effective
 * operand size decides the base width, which is why it is forwarded to the
 * C implementation. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1260
1261
/** Opcode 0x0f 0x01 0xd0. XGETBV - \#UD unless the guest CPU profile exposes
 * XSAVE/XRSTOR. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1276
1277
/** Opcode 0x0f 0x01 0xd1. XSETBV - \#UD unless the guest CPU profile exposes
 * XSAVE/XRSTOR. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1292
1293
/** Opcode 0x0f 0x01 /3. LIDT - load the IDTR from memory. In 64-bit mode the
 * operand size is forced to 64-bit regardless of prefixes. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1312
1313
/** Opcode 0x0f 0x01 0xd8. VMRUN (AMD SVM) - \#UD stub without nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif
1325
/** Opcode 0x0f 0x01 0xd9. VMMCALL (AMD) - always deferred so GIM can service
 * hypercalls regardless of the SVM feature flag. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}
1338
/** Opcode 0x0f 0x01 0xda. VMLOAD (AMD SVM) - \#UD stub without nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif
1350
1351
/** Opcode 0x0f 0x01 0xdb. VMSAVE (AMD SVM) - \#UD stub without nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif
1363
1364
/** Opcode 0x0f 0x01 0xdc. STGI (AMD SVM) - \#UD stub without nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif
1376
1377
/** Opcode 0x0f 0x01 0xdd. CLGI (AMD SVM) - \#UD stub without nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif
1389
1390
/** Opcode 0x0f 0x01 0xdf. INVLPGA (AMD SVM) - \#UD stub without nested SVM
 * support. (Defined ahead of 0xde/skinit; dispatch order is handled in
 * iemOp_Grp7.) */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif
1402
1403
/** Opcode 0x0f 0x01 0xde. SKINIT (AMD SVM) - \#UD stub without nested SVM support. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif
1415
1416
/** Opcode 0x0f 0x01 /4. SMSW - store the machine status word (CR0 low word).
 * Register destination respects the operand size; memory stores are 16-bit. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1439
1440
/** Opcode 0x0f 0x01 /6. LMSW - load the machine status word. The register
 * form passes NIL_RTGCPTR as the effective address; the C worker uses it for
 * SVM/VMX intercept reporting. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1471
1472
/** Opcode 0x0f 0x01 /7. INVLPG - invalidate the TLB entry for the effective
 * address (memory form only). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1486
1487
/** Opcode 0x0f 0x01 /7, rm=0. SWAPGS - 64-bit mode only. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1496
1497
/** Opcode 0x0f 0x01 /7, rm=1. RDTSCP - deferred straight to the C worker. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}
1505
1506
/**
 * Group 7 jump table, memory variant, indexed by the mod r/m reg field.
 * The register encodings are dispatched by hand in iemOp_Grp7 instead.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,        /* /0 */
    iemOp_Grp7_sidt,        /* /1 */
    iemOp_Grp7_lgdt,        /* /2 */
    iemOp_Grp7_lidt,        /* /3 */
    iemOp_Grp7_smsw,        /* /4 */
    iemOp_InvalidWithRM,    /* /5 */
    iemOp_Grp7_lmsw,        /* /6 */
    iemOp_Grp7_invlpg       /* /7 */
};
1521
1522
/** Opcode 0x0f 0x01. Group 7 dispatcher. Memory forms go via
 * g_apfnGroup7Mem; register forms encode extra instructions in the r/m bits
 * and are dispatched by the nested switches below. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: /* VMX: 0xc1..0xc4 */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* monitor/mwait: 0xc8/0xc9 */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* xgetbv/xsetbv: 0xd0/0xd1 */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* AMD SVM: 0xd8..0xdf (all r/m values covered). */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* smsw is valid in register form too. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* lmsw is valid in register form too. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* swapgs/rdtscp: 0xf8/0xf9 */
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1592
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03).
 * Fetches a 16-bit selector from Ew and calls the LAR/LSL C worker; the
 * 32-bit and 64-bit destination cases share the 64-bit worker. Not valid in
 * real or V86 mode. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1686
1687
1688
/** Opcode 0x0f 0x02. LAR - load access rights; shares the worker with LSL. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC(lar, "lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1695
1696
/** Opcode 0x0f 0x03. LSL - load segment limit; shares the worker with LAR. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1703
1704
/** Opcode 0x0f 0x05. SYSCALL - deferred straight to the C worker. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL   */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1712
1713
/** Opcode 0x0f 0x06. CLTS - clear the task-switched flag in CR0. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1721
1722
/** Opcode 0x0f 0x07. SYSRET - deferred straight to the C worker. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL   */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1730
1731
/** Opcode 0x0f 0x08. INVD - invalidate internal caches (privileged). */
FNIEMOP_DEF(iemOp_invd)
{
    IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
}
1740
1741
/** Opcode 0x0f 0x09. WBINVD - write back and invalidate caches (privileged). */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
}
1750
1751
/** Opcode 0x0f 0x0b. UD2 - guaranteed undefined opcode, raises \#UD. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1758
/** Opcode 0x0f 0x0d. AMD prefetch group (PREFETCH/PREFETCHW), implemented as
 * a NOP with an effective-address calculation; \#UD without the 3DNow!
 * prefetch feature or with a register operand. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1799
1800
/** Opcode 0x0f 0x0e. FEMMS (AMD 3DNow!) - fast exit from MMX state, switches
 * the FPU out of MMX mode. */
FNIEMOP_DEF(iemOp_femms)
{
    IEMOP_MNEMONIC(femms, "femms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1816
1817
/** Opcode 0x0f 0x0f. 3DNow! escape: the real opcode is the trailing imm8,
 * dispatched by iemOp_3DNowDispatcher when 3DNow! support is compiled in. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

#ifdef IEM_WITH_3DNOW
    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
1836
1837
/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 *
 * MOVUPS xmm, xmm/m128: unaligned 128-bit load into the destination XMM reg.
 */
FNIEMOP_DEF(iemOp_movups_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory (load).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1888
1889
/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * MOVUPD xmm, xmm/m128: unaligned 128-bit load into the destination XMM reg.
 */
FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory (load).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1939
1940
/**
 * @opcode      0x10
 * @oppfx       0xf3
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 *
 * MOVSS xmm, xmm/m32. Register form merges the low dword (upper bits of the
 * destination are preserved); memory form zero-extends to 128 bits.
 */
FNIEMOP_DEF(iemOp_movss_Vss_Wss)
{
    IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory (load, zero-extended).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1993
1994
/**
 * @opcode      0x10
 * @oppfx       0xf2
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 *
 * MOVSD xmm, xmm/m64. Register form merges the low qword (high qword of the
 * destination is preserved); memory form zero-extends to 128 bits.
 */
FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
{
    IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory (load, zero-extended).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2047
2048
2049/**
2050 * @opcode 0x11
2051 * @oppfx none
2052 * @opcpuid sse
2053 * @opgroup og_sse_simdfp_datamove
2054 * @opxcpttype 4UA
2055 * @optest op1=1 op2=2 -> op1=2
2056 * @optest op1=0 op2=-42 -> op1=-42
2057 */
2058FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2059{
    /*
     * movups xmm2/m128, xmm1 (0F 11) - unaligned packed-single store/move.
     * Reg form: full 128-bit register copy.  Mem form: 128-bit store with no
     * alignment restriction (plain IEM_MC_STORE_MEM_U128, not the _ALIGN_SSE
     * variant).
     */
2060 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2062 if (IEM_IS_MODRM_REG_MODE(bRm))
2063 {
2064 /*
2065 * Register, register.
2066 */
2067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2068 IEM_MC_BEGIN(0, 0);
2069 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2070 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
         /* MR encoding: rm is the destination, reg the source. */
2071 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2072 IEM_GET_MODRM_REG(pVCpu, bRm));
2073 IEM_MC_ADVANCE_RIP();
2074 IEM_MC_END();
2075 }
2076 else
2077 {
2078 /*
2079 * Memory, register.
2080 */
2081 IEM_MC_BEGIN(0, 2);
2082 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2084
2085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2087 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2088 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2089
2090 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2091 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2092
2093 IEM_MC_ADVANCE_RIP();
2094 IEM_MC_END();
2095 }
2096 return VINF_SUCCESS;
2097}
2098
2099
2100/**
2101 * @opcode 0x11
2102 * @oppfx 0x66
2103 * @opcpuid sse2
2104 * @opgroup og_sse2_pcksclr_datamove
2105 * @opxcpttype 4UA
2106 * @optest op1=1 op2=2 -> op1=2
2107 * @optest op1=0 op2=-42 -> op1=-42
2108 */
2109FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2110{
    /*
     * movupd xmm2/m128, xmm1 (66 0F 11) - unaligned packed-double store/move.
     * Same structure as movups above, but gates on SSE2 instead of SSE.
     * Mem form performs an unaligned 128-bit store.
     */
2111 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2113 if (IEM_IS_MODRM_REG_MODE(bRm))
2114 {
2115 /*
2116 * Register, register.
2117 */
2118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2119 IEM_MC_BEGIN(0, 0);
2120 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2121 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
         /* MR encoding: rm is the destination, reg the source. */
2122 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2123 IEM_GET_MODRM_REG(pVCpu, bRm));
2124 IEM_MC_ADVANCE_RIP();
2125 IEM_MC_END();
2126 }
2127 else
2128 {
2129 /*
2130 * Memory, register.
2131 */
2132 IEM_MC_BEGIN(0, 2);
2133 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2135
2136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2138 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2139 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2140
2141 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2142 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2143
2144 IEM_MC_ADVANCE_RIP();
2145 IEM_MC_END();
2146 }
2147 return VINF_SUCCESS;
2148}
2149
2150
2151/**
2152 * @opcode 0x11
2153 * @oppfx 0xf3
2154 * @opcpuid sse
2155 * @opgroup og_sse_simdfp_datamove
2156 * @opxcpttype 5
2157 * @optest op1=1 op2=2 -> op1=2
2158 * @optest op1=0 op2=-22 -> op1=-22
2159 */
2160FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2161{
    /*
     * movss xmm2/m32, xmm1 (F3 0F 11) - scalar single-precision store/move.
     * Reg form: writes only the low dword of the destination register (upper
     * 96 bits untouched).  Mem form: 32-bit store, no alignment requirement.
     */
2162 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2164 if (IEM_IS_MODRM_REG_MODE(bRm))
2165 {
2166 /*
2167 * Register, register.
2168 */
2169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2170 IEM_MC_BEGIN(0, 1);
2171 IEM_MC_LOCAL(uint32_t, uSrc);
2172
2173 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2174 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2175 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2176 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2177
2178 IEM_MC_ADVANCE_RIP();
2179 IEM_MC_END();
2180 }
2181 else
2182 {
2183 /*
2184 * Memory, register.
2185 */
2186 IEM_MC_BEGIN(0, 2);
2187 IEM_MC_LOCAL(uint32_t, uSrc);
2188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2189
2190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2192 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2193 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2194
2195 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2196 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2197
2198 IEM_MC_ADVANCE_RIP();
2199 IEM_MC_END();
2200 }
2201 return VINF_SUCCESS;
2202}
2203
2204
2205/**
2206 * @opcode 0x11
2207 * @oppfx 0xf2
2208 * @opcpuid sse2
2209 * @opgroup og_sse2_pcksclr_datamove
2210 * @opxcpttype 5
2211 * @optest op1=1 op2=2 -> op1=2
2212 * @optest op1=0 op2=-42 -> op1=-42
2213 */
2214FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2215{
    /*
     * movsd xmm2/m64, xmm1 (F2 0F 11) - scalar double-precision store/move.
     * Reg form: writes only the low qword of the destination register (high
     * qword untouched).  Mem form: 64-bit store, no alignment requirement.
     */
2216 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2217 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2218 if (IEM_IS_MODRM_REG_MODE(bRm))
2219 {
2220 /*
2221 * Register, register.
2222 */
2223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2224 IEM_MC_BEGIN(0, 1);
2225 IEM_MC_LOCAL(uint64_t, uSrc);
2226
2227 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2228 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2229 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2230 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2231
2232 IEM_MC_ADVANCE_RIP();
2233 IEM_MC_END();
2234 }
2235 else
2236 {
2237 /*
2238 * Memory, register.
2239 */
2240 IEM_MC_BEGIN(0, 2);
2241 IEM_MC_LOCAL(uint64_t, uSrc);
2242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2243
2244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2246 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2247 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2248
2249 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2250 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2251
2252 IEM_MC_ADVANCE_RIP();
2253 IEM_MC_END();
2254 }
2255 return VINF_SUCCESS;
2256}
2257
2258
2259FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2260{
    /*
     * 0F 12: two instructions share this opcode, selected by the mod field:
     *   - mod=11: movhlps xmm1, xmm2  - copy the HIGH qword of the source
     *     register into the LOW qword of the destination.
     *   - mod!=11: movlps xmm1, m64   - load 64 bits from memory into the
     *     low qword of the destination (high qword preserved).
     */
2261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2262 if (IEM_IS_MODRM_REG_MODE(bRm))
2263 {
2264 /**
2265 * @opcode 0x12
2266 * @opcodesub 11 mr/reg
2267 * @oppfx none
2268 * @opcpuid sse
2269 * @opgroup og_sse_simdfp_datamove
2270 * @opxcpttype 5
2271 * @optest op1=1 op2=2 -> op1=2
2272 * @optest op1=0 op2=-42 -> op1=-42
2273 */
2274 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2275
2276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2277 IEM_MC_BEGIN(0, 1);
2278 IEM_MC_LOCAL(uint64_t, uSrc);
2279
2280 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2281 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
         /* High qword of source -> low qword of destination. */
2282 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2283 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2284
2285 IEM_MC_ADVANCE_RIP();
2286 IEM_MC_END();
2287 }
2288 else
2289 {
2290 /**
2291 * @opdone
2292 * @opcode 0x12
2293 * @opcodesub !11 mr/reg
2294 * @oppfx none
2295 * @opcpuid sse
2296 * @opgroup og_sse_simdfp_datamove
2297 * @opxcpttype 5
2298 * @optest op1=1 op2=2 -> op1=2
2299 * @optest op1=0 op2=-42 -> op1=-42
2300 * @opfunction iemOp_movlps_Vq_Mq__vmovhlps
2301 */
2302 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2303
2304 IEM_MC_BEGIN(0, 2);
2305 IEM_MC_LOCAL(uint64_t, uSrc);
2306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2307
2308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2310 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2312
2313 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2314 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2315
2316 IEM_MC_ADVANCE_RIP();
2317 IEM_MC_END();
2318 }
2319 return VINF_SUCCESS;
2320}
2321
2322
2323/**
2324 * @opcode 0x12
2325 * @opcodesub !11 mr/reg
2326 * @oppfx 0x66
2327 * @opcpuid sse2
2328 * @opgroup og_sse2_pcksclr_datamove
2329 * @opxcpttype 5
2330 * @optest op1=1 op2=2 -> op1=2
2331 * @optest op1=0 op2=-42 -> op1=-42
2332 */
2333FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2334{
    /*
     * movlpd xmm1, m64 (66 0F 12): load 64 bits into the low qword of an XMM
     * register, preserving the high qword.  Only the memory form is valid;
     * the register form (mod=11) raises #UD.
     */
2335 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2336 if (IEM_IS_MODRM_MEM_MODE(bRm))
2337 {
2338 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2339
2340 IEM_MC_BEGIN(0, 2);
2341 IEM_MC_LOCAL(uint64_t, uSrc);
2342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2343
2344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2346 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2347 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2348
2349 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2350 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2351
2352 IEM_MC_ADVANCE_RIP();
2353 IEM_MC_END();
2354 return VINF_SUCCESS;
2355 }
2356
2357 /**
2358 * @opdone
2359 * @opmnemonic ud660f12m3
2360 * @opcode 0x12
2361 * @opcodesub 11 mr/reg
2362 * @oppfx 0x66
2363 * @opunused immediate
2364 * @opcpuid sse
2365 * @optest ->
2366 */
2367 return IEMOP_RAISE_INVALID_OPCODE();
2368}
2369
2370
2371/**
2372 * @opcode 0x12
2373 * @oppfx 0xf3
2374 * @opcpuid sse3
2375 * @opgroup og_sse3_pcksclr_datamove
2376 * @opxcpttype 4
2377 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2378 * op1=0x00000002000000020000000100000001
2379 */
2380FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2381{
    /*
     * movsldup xmm1, xmm2/m128 (F3 0F 12, SSE3): duplicate the even-indexed
     * (low) dwords of the source - see the @optest above.  The actual shuffle
     * is performed by the iemAImpl_movsldup assembly/C helper.
     */
2382 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2383 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2384 if (IEM_IS_MODRM_REG_MODE(bRm))
2385 {
2386 /*
2387 * Register, register.
2388 */
2389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2390 IEM_MC_BEGIN(2, 0);
2391 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2392 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2393
2394 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2395 IEM_MC_PREPARE_SSE_USAGE();
2396
2397 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2398 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2399 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2400
2401 IEM_MC_ADVANCE_RIP();
2402 IEM_MC_END();
2403 }
2404 else
2405 {
2406 /*
2407 * Register, memory.
2408 */
2409 IEM_MC_BEGIN(2, 2);
2410 IEM_MC_LOCAL(RTUINT128U, uSrc);
2411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2412 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2413 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2414
2415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2417 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2418 IEM_MC_PREPARE_SSE_USAGE();
2419
         /* Full 128-bit memory operand: 16-byte alignment enforced (xcpt type 4). */
2420 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2421 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2422 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2423
2424 IEM_MC_ADVANCE_RIP();
2425 IEM_MC_END();
2426 }
2427 return VINF_SUCCESS;
2428}
2429
2430
2431/**
2432 * @opcode 0x12
2433 * @oppfx 0xf2
2434 * @opcpuid sse3
2435 * @opgroup og_sse3_pcksclr_datamove
2436 * @opxcpttype 5
2437 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2438 * op1=0x22222222111111112222222211111111
2439 */
2440FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2441{
    /*
     * movddup xmm1, xmm2/m64 (F2 0F 12, SSE3): duplicate a qword into both
     * halves of the destination (see @optest above); the duplication itself
     * is done by the iemAImpl_movddup helper.  Note the memory operand is
     * only 64 bits wide, hence no alignment check (xcpt type 5).
     */
2442 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2443 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2444 if (IEM_IS_MODRM_REG_MODE(bRm))
2445 {
2446 /*
2447 * Register, register.
2448 */
2449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2450 IEM_MC_BEGIN(2, 0);
2451 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2452 IEM_MC_ARG(uint64_t, uSrc, 1);
2453
2454 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2455 IEM_MC_PREPARE_SSE_USAGE();
2456
         /* Only the low qword of the source register is used. */
2457 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2458 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2459 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2460
2461 IEM_MC_ADVANCE_RIP();
2462 IEM_MC_END();
2463 }
2464 else
2465 {
2466 /*
2467 * Register, memory.
2468 */
2469 IEM_MC_BEGIN(2, 2);
2470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2471 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2472 IEM_MC_ARG(uint64_t, uSrc, 1);
2473
2474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2476 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2477 IEM_MC_PREPARE_SSE_USAGE();
2478
2479 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2480 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2481 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2482
2483 IEM_MC_ADVANCE_RIP();
2484 IEM_MC_END();
2485 }
2486 return VINF_SUCCESS;
2487}
2488
2489
2490/**
2491 * @opcode 0x13
2492 * @opcodesub !11 mr/reg
2493 * @oppfx none
2494 * @opcpuid sse
2495 * @opgroup og_sse_simdfp_datamove
2496 * @opxcpttype 5
2497 * @optest op1=1 op2=2 -> op1=2
2498 * @optest op1=0 op2=-42 -> op1=-42
2499 */
2500FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2501{
    /*
     * movlps m64, xmm1 (0F 13): store the LOW qword of an XMM register to
     * memory.  Only the memory form is valid; mod=11 raises #UD (see the
     * ud0f13m3 doxygen stub below).
     */
2502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2503 if (IEM_IS_MODRM_MEM_MODE(bRm))
2504 {
2505 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2506
2507 IEM_MC_BEGIN(0, 2);
2508 IEM_MC_LOCAL(uint64_t, uSrc);
2509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2510
2511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2513 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2514 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2515
2516 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2517 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2518
2519 IEM_MC_ADVANCE_RIP();
2520 IEM_MC_END();
2521 return VINF_SUCCESS;
2522 }
2523
2524 /**
2525 * @opdone
2526 * @opmnemonic ud0f13m3
2527 * @opcode 0x13
2528 * @opcodesub 11 mr/reg
2529 * @oppfx none
2530 * @opunused immediate
2531 * @opcpuid sse
2532 * @optest ->
2533 */
2534 return IEMOP_RAISE_INVALID_OPCODE();
2535}
2536
2537
2538/**
2539 * @opcode 0x13
2540 * @opcodesub !11 mr/reg
2541 * @oppfx 0x66
2542 * @opcpuid sse2
2543 * @opgroup og_sse2_pcksclr_datamove
2544 * @opxcpttype 5
2545 * @optest op1=1 op2=2 -> op1=2
2546 * @optest op1=0 op2=-42 -> op1=-42
2547 */
2548FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2549{
    /*
     * movlpd m64, xmm1 (66 0F 13): store the LOW qword of an XMM register to
     * memory.  SSE2 instruction; only the memory form is valid, mod=11
     * raises #UD.
     */
2550 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2551 if (IEM_IS_MODRM_MEM_MODE(bRm))
2552 {
2553 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2554 IEM_MC_BEGIN(0, 2);
2555 IEM_MC_LOCAL(uint64_t, uSrc);
2556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2557
2558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2560 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2561 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2562
2563 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2564 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2565
2566 IEM_MC_ADVANCE_RIP();
2567 IEM_MC_END();
2568 return VINF_SUCCESS;
2569 }
2570
2571 /**
2572 * @opdone
2573 * @opmnemonic ud660f13m3
2574 * @opcode 0x13
2575 * @opcodesub 11 mr/reg
2576 * @oppfx 0x66
2577 * @opunused immediate
2578 * @opcpuid sse
2579 * @optest ->
2580 */
2581 return IEMOP_RAISE_INVALID_OPCODE();
2582}
2583
2584
2585/**
2586 * @opmnemonic udf30f13
2587 * @opcode 0x13
2588 * @oppfx 0xf3
2589 * @opunused intel-modrm
2590 * @opcpuid sse
2591 * @optest ->
2592 * @opdone
2593 */
2594
2595/**
2596 * @opmnemonic udf20f13
2597 * @opcode 0x13
2598 * @oppfx 0xf2
2599 * @opunused intel-modrm
2600 * @opcpuid sse
2601 * @optest ->
2602 * @opdone
2603 */
2604
2605/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2606FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2607{
    /* unpcklps xmm1, xmm2/m128 (0F 14): interleave the low halves; decoding
       and operand handling are shared via the common low-low worker. */
2608 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2609 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2610}
2611
2612
2613/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2614FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2615{
    /* unpcklpd xmm1, xmm2/m128 (66 0F 14): SSE2 variant - note the Sse2
       common worker, which performs the SSE2 feature check. */
2616 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2617 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2618}
2619
2620
2621/**
2622 * @opdone
2623 * @opmnemonic udf30f14
2624 * @opcode 0x14
2625 * @oppfx 0xf3
2626 * @opunused intel-modrm
2627 * @opcpuid sse
2628 * @optest ->
2629 * @opdone
2630 */
2631
2632/**
2633 * @opmnemonic udf20f14
2634 * @opcode 0x14
2635 * @oppfx 0xf2
2636 * @opunused intel-modrm
2637 * @opcpuid sse
2638 * @optest ->
2639 * @opdone
2640 */
2641
2642/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2643FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2644{
    /* unpckhps xmm1, xmm2/m128 (0F 15): interleave the high halves; uses the
       common high-high worker for decoding and operand handling. */
2645 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2646 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2647}
2648
2649
2650/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2651FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2652{
    /* unpckhpd xmm1, xmm2/m128 (66 0F 15): SSE2 variant - note the Sse2
       common worker, which performs the SSE2 feature check. */
2653 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2654 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2655}
2656
2657
2658/* Opcode 0xf3 0x0f 0x15 - invalid */
2659/* Opcode 0xf2 0x0f 0x15 - invalid */
2660
2661/**
2662 * @opdone
2663 * @opmnemonic udf30f15
2664 * @opcode 0x15
2665 * @oppfx 0xf3
2666 * @opunused intel-modrm
2667 * @opcpuid sse
2668 * @optest ->
2669 * @opdone
2670 */
2671
2672/**
2673 * @opmnemonic udf20f15
2674 * @opcode 0x15
2675 * @oppfx 0xf2
2676 * @opunused intel-modrm
2677 * @opcpuid sse
2678 * @optest ->
2679 * @opdone
2680 */
2681
2682FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2683{
    /*
     * 0F 16: two instructions share this opcode, selected by the mod field:
     *   - mod=11: movlhps xmm1, xmm2  - copy the LOW qword of the source
     *     register into the HIGH qword of the destination.
     *   - mod!=11: movhps xmm1, m64   - load 64 bits from memory into the
     *     high qword of the destination (low qword preserved).
     */
2684 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2685 if (IEM_IS_MODRM_REG_MODE(bRm))
2686 {
2687 /**
2688 * @opcode 0x16
2689 * @opcodesub 11 mr/reg
2690 * @oppfx none
2691 * @opcpuid sse
2692 * @opgroup og_sse_simdfp_datamove
2693 * @opxcpttype 5
2694 * @optest op1=1 op2=2 -> op1=2
2695 * @optest op1=0 op2=-42 -> op1=-42
2696 */
2697 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2698
2699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2700 IEM_MC_BEGIN(0, 1);
2701 IEM_MC_LOCAL(uint64_t, uSrc);
2702
2703 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2704 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
         /* Low qword of source -> high qword of destination. */
2705 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2706 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2707
2708 IEM_MC_ADVANCE_RIP();
2709 IEM_MC_END();
2710 }
2711 else
2712 {
2713 /**
2714 * @opdone
2715 * @opcode 0x16
2716 * @opcodesub !11 mr/reg
2717 * @oppfx none
2718 * @opcpuid sse
2719 * @opgroup og_sse_simdfp_datamove
2720 * @opxcpttype 5
2721 * @optest op1=1 op2=2 -> op1=2
2722 * @optest op1=0 op2=-42 -> op1=-42
2723 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2724 */
2725 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2726
2727 IEM_MC_BEGIN(0, 2);
2728 IEM_MC_LOCAL(uint64_t, uSrc);
2729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2730
2731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2733 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2734 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2735
2736 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2737 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2738
2739 IEM_MC_ADVANCE_RIP();
2740 IEM_MC_END();
2741 }
2742 return VINF_SUCCESS;
2743}
2744
2745
2746/**
2747 * @opcode 0x16
2748 * @opcodesub !11 mr/reg
2749 * @oppfx 0x66
2750 * @opcpuid sse2
2751 * @opgroup og_sse2_pcksclr_datamove
2752 * @opxcpttype 5
2753 * @optest op1=1 op2=2 -> op1=2
2754 * @optest op1=0 op2=-42 -> op1=-42
2755 */
2756FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2757{
    /*
     * movhpd xmm1, m64 (66 0F 16): load 64 bits into the HIGH qword of an
     * XMM register (low qword preserved).  SSE2 instruction; only the memory
     * form is valid, mod=11 raises #UD.
     */
2758 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2759 if (IEM_IS_MODRM_MEM_MODE(bRm))
2760 {
2761 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2762 IEM_MC_BEGIN(0, 2);
2763 IEM_MC_LOCAL(uint64_t, uSrc);
2764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2765
2766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2768 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2769 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2770
2771 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2772 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2773
2774 IEM_MC_ADVANCE_RIP();
2775 IEM_MC_END();
2776 return VINF_SUCCESS;
2777 }
2778
2779 /**
2780 * @opdone
2781 * @opmnemonic ud660f16m3
2782 * @opcode 0x16
2783 * @opcodesub 11 mr/reg
2784 * @oppfx 0x66
2785 * @opunused immediate
2786 * @opcpuid sse
2787 * @optest ->
2788 */
2789 return IEMOP_RAISE_INVALID_OPCODE();
2790}
2791
2792
2793/**
2794 * @opcode 0x16
2795 * @oppfx 0xf3
2796 * @opcpuid sse3
2797 * @opgroup og_sse3_pcksclr_datamove
2798 * @opxcpttype 4
2799 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2800 * op1=0x00000002000000020000000100000001
2801 */
2802FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
2803{
    /*
     * movshdup xmm1, xmm2/m128 (F3 0F 16, SSE3): duplicate the odd-indexed
     * (high) dwords of the source - see the @optest above.  The shuffle is
     * performed by the iemAImpl_movshdup helper.
     */
2804 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2805 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2806 if (IEM_IS_MODRM_REG_MODE(bRm))
2807 {
2808 /*
2809 * Register, register.
2810 */
2811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2812 IEM_MC_BEGIN(2, 0);
2813 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2814 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2815
2816 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2817 IEM_MC_PREPARE_SSE_USAGE();
2818
2819 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2820 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2821 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2822
2823 IEM_MC_ADVANCE_RIP();
2824 IEM_MC_END();
2825 }
2826 else
2827 {
2828 /*
2829 * Register, memory.
2830 */
2831 IEM_MC_BEGIN(2, 2);
2832 IEM_MC_LOCAL(RTUINT128U, uSrc);
2833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2834 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2835 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2836
2837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2839 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2840 IEM_MC_PREPARE_SSE_USAGE();
2841
         /* Full 128-bit memory operand: 16-byte alignment enforced (xcpt type 4). */
2842 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2843 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2844 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2845
2846 IEM_MC_ADVANCE_RIP();
2847 IEM_MC_END();
2848 }
2849 return VINF_SUCCESS;
2850}
2851
2852/**
2853 * @opdone
2854 * @opmnemonic udf30f16
2855 * @opcode 0x16
2856 * @oppfx 0xf2
2857 * @opunused intel-modrm
2858 * @opcpuid sse
2859 * @optest ->
2860 * @opdone
2861 */
2862
2863
2864/**
2865 * @opcode 0x17
2866 * @opcodesub !11 mr/reg
2867 * @oppfx none
2868 * @opcpuid sse
2869 * @opgroup og_sse_simdfp_datamove
2870 * @opxcpttype 5
2871 * @optest op1=1 op2=2 -> op1=2
2872 * @optest op1=0 op2=-42 -> op1=-42
2873 */
2874FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
2875{
    /*
     * movhps m64, xmm1 (0F 17): store the HIGH qword of an XMM register to
     * memory.  Only the memory form is valid; mod=11 raises #UD.
     */
2876 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2877 if (IEM_IS_MODRM_MEM_MODE(bRm))
2878 {
2879 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2880
2881 IEM_MC_BEGIN(0, 2);
2882 IEM_MC_LOCAL(uint64_t, uSrc);
2883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2884
2885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2887 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2888 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2889
2890 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2891 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2892
2893 IEM_MC_ADVANCE_RIP();
2894 IEM_MC_END();
2895 return VINF_SUCCESS;
2896 }
2897
2898 /**
2899 * @opdone
2900 * @opmnemonic ud0f17m3
2901 * @opcode 0x17
2902 * @opcodesub 11 mr/reg
2903 * @oppfx none
2904 * @opunused immediate
2905 * @opcpuid sse
2906 * @optest ->
2907 */
2908 return IEMOP_RAISE_INVALID_OPCODE();
2909}
2910
2911
2912/**
2913 * @opcode 0x17
2914 * @opcodesub !11 mr/reg
2915 * @oppfx 0x66
2916 * @opcpuid sse2
2917 * @opgroup og_sse2_pcksclr_datamove
2918 * @opxcpttype 5
2919 * @optest op1=1 op2=2 -> op1=2
2920 * @optest op1=0 op2=-42 -> op1=-42
2921 */
2922FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
2923{
    /*
     * movhpd m64, xmm1 (66 0F 17): store the HIGH qword of an XMM register
     * to memory.  SSE2 instruction (see @opcpuid above); only the memory
     * form is valid, mod=11 raises #UD.
     */
2924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2925 if (IEM_IS_MODRM_MEM_MODE(bRm))
2926 {
2927 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2928
2929 IEM_MC_BEGIN(0, 2);
2930 IEM_MC_LOCAL(uint64_t, uSrc);
2931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2932
2933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
         /* movhpd requires SSE2, not SSE - gate on the SSE2 feature like the
            movhpd load form (66 0F 16) and movlpd (66 0F 13) do. */
2935 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2936 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2937
2938 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2939 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2940
2941 IEM_MC_ADVANCE_RIP();
2942 IEM_MC_END();
2943 return VINF_SUCCESS;
2944 }
2945
2946 /**
2947 * @opdone
2948 * @opmnemonic ud660f17m3
2949 * @opcode 0x17
2950 * @opcodesub 11 mr/reg
2951 * @oppfx 0x66
2952 * @opunused immediate
2953 * @opcpuid sse
2954 * @optest ->
2955 */
2956 return IEMOP_RAISE_INVALID_OPCODE();
2957}
2958
2959
2960/**
2961 * @opdone
2962 * @opmnemonic udf30f17
2963 * @opcode 0x17
2964 * @oppfx 0xf3
2965 * @opunused intel-modrm
2966 * @opcpuid sse
2967 * @optest ->
2968 * @opdone
2969 */
2970
2971/**
2972 * @opmnemonic udf20f17
2973 * @opcode 0x17
2974 * @oppfx 0xf2
2975 * @opunused intel-modrm
2976 * @opcpuid sse
2977 * @optest ->
2978 * @opdone
2979 */
2980
2981
2982/** Opcode 0x0f 0x18. */
2983FNIEMOP_DEF(iemOp_prefetch_Grp16)
2984{
    /*
     * 0F 18 - the PREFETCHh group.  The memory form is decoded for its
     * effective address but otherwise treated as a NOP; the register form
     * (mod=11) raises #UD.  /4../7 fall through to /0 (prefetchNTA) per the
     * AMD aliasing noted below.
     */
2985 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2986 if (IEM_IS_MODRM_MEM_MODE(bRm))
2987 {
2988 switch (IEM_GET_MODRM_REG_8(bRm))
2989 {
2990 case 4: /* Aliased to /0 for the time being according to AMD. */
2991 case 5: /* Aliased to /0 for the time being according to AMD. */
2992 case 6: /* Aliased to /0 for the time being according to AMD. */
2993 case 7: /* Aliased to /0 for the time being according to AMD. */
2994 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
2995 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
2996 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
2997 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
2998 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2999 }
3000
3001 IEM_MC_BEGIN(0, 1);
3002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3005 /* Currently a NOP. */
3006 NOREF(GCPtrEffSrc);
3007 IEM_MC_ADVANCE_RIP();
3008 IEM_MC_END();
3009 return VINF_SUCCESS;
3010 }
3011
3012 return IEMOP_RAISE_INVALID_OPCODE();
3013}
3014
3015
3016/** Opcode 0x0f 0x19..0x1f. */
3017FNIEMOP_DEF(iemOp_nop_Ev)
3018{
    /*
     * 0F 19..1F - multi-byte NOP (nop Ev).  The ModR/M byte and any
     * effective address are decoded (so instruction length is correct) but
     * no operation is performed and no memory is touched.
     */
3019 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3020 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3021 if (IEM_IS_MODRM_REG_MODE(bRm))
3022 {
3023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3024 IEM_MC_BEGIN(0, 0);
3025 IEM_MC_ADVANCE_RIP();
3026 IEM_MC_END();
3027 }
3028 else
3029 {
3030 IEM_MC_BEGIN(0, 1);
3031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3034 /* Currently a NOP. */
3035 NOREF(GCPtrEffSrc);
3036 IEM_MC_ADVANCE_RIP();
3037 IEM_MC_END();
3038 }
3039 return VINF_SUCCESS;
3040}
3041
3042
3043/** Opcode 0x0f 0x20. */
3044FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3045{
    /*
     * mov Rd, Cd (0F 20) - read a control register.  The operand size is
     * forced to the native width (64-bit in long mode, else 32-bit), and a
     * LOCK prefix encodes CR8 on CPUs with the AltMovCr8 feature (AMD).
     * Only CR0/2/3/4/8 exist; anything else is #UD.  The real work is
     * deferred to the iemCImpl_mov_Rd_Cd C implementation.
     */
3046 /* mod is ignored, as is operand size overrides. */
3047 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3048 IEMOP_HLP_MIN_386();
3049 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3050 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3051 else
3052 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3053
3054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3055 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3056 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3057 {
3058 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3059 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3060 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3061 iCrReg |= 8;
3062 }
3063 switch (iCrReg)
3064 {
3065 case 0: case 2: case 3: case 4: case 8:
3066 break;
3067 default:
3068 return IEMOP_RAISE_INVALID_OPCODE();
3069 }
3070 IEMOP_HLP_DONE_DECODING();
3071
3072 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3073}
3074
3075
3076/** Opcode 0x0f 0x21. */
3077FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3078{
    /*
     * mov Rd, Dd (0F 21) - read a debug register, deferred to the
     * iemCImpl_mov_Rd_Dd C implementation.  REX.R would select DR8+, which
     * do not exist, so it raises #UD.
     */
3079 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3080 IEMOP_HLP_MIN_386();
3081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3083 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3084 return IEMOP_RAISE_INVALID_OPCODE();
3085 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
3086 IEM_GET_MODRM_RM(pVCpu, bRm),
3087 IEM_GET_MODRM_REG_8(bRm));
3088}
3089
3090
3091/** Opcode 0x0f 0x22. */
3092FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3093{
    /*
     * mov Cd, Rd (0F 22) - write a control register.  Mirror image of
     * iemOp_mov_Rd_Cd above: native operand size is forced, LOCK encodes
     * CR8 on AltMovCr8-capable CPUs, and only CR0/2/3/4/8 are valid.
     * Deferred to the iemCImpl_mov_Cd_Rd C implementation.
     */
3094 /* mod is ignored, as is operand size overrides. */
3095 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3096 IEMOP_HLP_MIN_386();
3097 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3098 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3099 else
3100 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3101
3102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3103 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3104 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3105 {
3106 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3107 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3108 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3109 iCrReg |= 8;
3110 }
3111 switch (iCrReg)
3112 {
3113 case 0: case 2: case 3: case 4: case 8:
3114 break;
3115 default:
3116 return IEMOP_RAISE_INVALID_OPCODE();
3117 }
3118 IEMOP_HLP_DONE_DECODING();
3119
3120 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3121}
3122
3123
3124/** Opcode 0x0f 0x23. */
3125FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3126{
    /*
     * mov Dd, Rd (0F 23) - write a debug register, deferred to the
     * iemCImpl_mov_Dd_Rd C implementation.  REX.R would select DR8+, which
     * do not exist, so it raises #UD.
     */
3127 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3128 IEMOP_HLP_MIN_386();
3129 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3131 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3132 return IEMOP_RAISE_INVALID_OPCODE();
3133 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
3134 IEM_GET_MODRM_REG_8(bRm),
3135 IEM_GET_MODRM_RM(pVCpu, bRm));
3136}
3137
3138
3139/** Opcode 0x0f 0x24. */
3140FNIEMOP_DEF(iemOp_mov_Rd_Td)
3141{
    /*
     * mov Rd, Td (0F 24) - read a test register.  Test registers only exist
     * on pre-Pentium CPUs; for Pentium and later targets this is #UD.
     * Deferred to the iemCImpl_mov_Rd_Td C implementation otherwise.
     */
3142 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3143 IEMOP_HLP_MIN_386();
3144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3146 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3147 return IEMOP_RAISE_INVALID_OPCODE();
3148 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
3149 IEM_GET_MODRM_RM(pVCpu, bRm),
3150 IEM_GET_MODRM_REG_8(bRm));
3151}
3152
3153
3154/** Opcode 0x0f 0x26. */
3155FNIEMOP_DEF(iemOp_mov_Td_Rd)
3156{
    /*
     * mov Td, Rd (0F 26) - write a test register.  Like 0F 24 above: #UD on
     * Pentium and later targets, otherwise deferred to iemCImpl_mov_Td_Rd.
     */
3157 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3158 IEMOP_HLP_MIN_386();
3159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3161 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3162 return IEMOP_RAISE_INVALID_OPCODE();
3163 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
3164 IEM_GET_MODRM_REG_8(bRm),
3165 IEM_GET_MODRM_RM(pVCpu, bRm));
3166}
3167
3168
3169/**
3170 * @opcode 0x28
3171 * @oppfx none
3172 * @opcpuid sse
3173 * @opgroup og_sse_simdfp_datamove
3174 * @opxcpttype 1
3175 * @optest op1=1 op2=2 -> op1=2
3176 * @optest op1=0 op2=-42 -> op1=-42
3177 */
3178FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3179{
    /*
     * movaps xmm1, xmm2/m128 (0F 28) - aligned packed-single load/move.
     * Reg form: full 128-bit register copy.  Mem form: 128-bit load with
     * 16-byte alignment enforced (FETCH_MEM_U128_ALIGN_SSE, xcpt type 1).
     */
3180 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3182 if (IEM_IS_MODRM_REG_MODE(bRm))
3183 {
3184 /*
3185 * Register, register.
3186 */
3187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3188 IEM_MC_BEGIN(0, 0);
3189 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3190 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
         /* RM encoding: reg is the destination, rm the source. */
3191 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3192 IEM_GET_MODRM_RM(pVCpu, bRm));
3193 IEM_MC_ADVANCE_RIP();
3194 IEM_MC_END();
3195 }
3196 else
3197 {
3198 /*
3199 * Register, memory.
3200 */
3201 IEM_MC_BEGIN(0, 2);
3202 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3204
3205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3207 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3208 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3209
3210 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3211 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3212
3213 IEM_MC_ADVANCE_RIP();
3214 IEM_MC_END();
3215 }
3216 return VINF_SUCCESS;
3217}
3218
/**
 * @opcode      0x28
 * @oppfx       66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
{
    /* MOVAPD xmm, xmm/m128 - identical data flow to MOVAPS above, but gated
       on SSE2 (note the SSE2 exception check). */
    IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
                              IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Aligned 16-byte fetch; misalignment faults per MOVAPD semantics. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3268
3269/* Opcode 0xf3 0x0f 0x28 - invalid */
3270/* Opcode 0xf2 0x0f 0x28 - invalid */
3271
/**
 * @opcode      0x29
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse_simdfp_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
{
    /* MOVAPS xmm/m128, xmm - the store direction of 0f 28. */
    IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        /* Note: despite the name, GCPtrEffSrc is the *store* address here. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Aligned 16-byte store per MOVAPS semantics. */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3321
/**
 * @opcode      0x29
 * @oppfx       66
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
{
    /* MOVAPD xmm/m128, xmm - store direction of 0f 28, SSE2 gated. */
    IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        /* Note: despite the name, GCPtrEffSrc is the *store* address here. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3371
3372/* Opcode 0xf3 0x0f 0x29 - invalid */
3373/* Opcode 0xf2 0x0f 0x29 - invalid */
3374
3375
3376/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3377FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
3378/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3379FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
3380/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
3381FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
3382/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
3383FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
3384
3385
/**
 * @opcode      0x2b
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse1_cachect
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
{
    /* MOVNTPS m128, xmm - non-temporal aligned 128-bit store.  The NT cache
       hint itself is not modeled; we do a regular aligned store. */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
3425
/**
 * @opcode      0x2b
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_cachect
 * @opxcpttype  1
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
{
    /* MOVNTPD m128, xmm - SSE2 sibling of MOVNTPS above; NT hint not
       modeled, memory-destination only (#UD for the reg,reg form). */
    IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
3465/* Opcode 0xf3 0x0f 0x2b - invalid */
3466/* Opcode 0xf2 0x0f 0x2b - invalid */
3467
3468
3469/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3470FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
3471/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
3472FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
3473/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
3474FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
3475/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
3476FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
3477
3478/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
3479FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
3480/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
3481FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
3482/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
3483FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
3484/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
3485FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
3486
3487/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
3488FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
3489/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
3490FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
3491/* Opcode 0xf3 0x0f 0x2e - invalid */
3492/* Opcode 0xf2 0x0f 0x2e - invalid */
3493
3494/** Opcode 0x0f 0x2f - comiss Vss, Wss */
3495FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
3496/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
3497FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
3498/* Opcode 0xf3 0x0f 0x2f - invalid */
3499/* Opcode 0xf2 0x0f 0x2f - invalid */
3500
/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    /* WRMSR - fully implemented in C; just decode and defer. */
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
3508
3509
/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    /* RDTSC - fully implemented in C; just decode and defer. */
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
3517
3518
/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    /* RDMSR - fully implemented in C; just decode and defer.
       (Comment fixed: RDMSR is 0f 32, not 0f 33.) */
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
3526
3527
/** Opcode 0x0f 0x33. */
FNIEMOP_DEF(iemOp_rdpmc)
{
    /* RDPMC - fully implemented in C; just decode and defer.
       (Comment fixed: RDPMC is 0f 33; 0f 34 is SYSENTER below.) */
    IEMOP_MNEMONIC(rdpmc, "rdpmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
}
3535
3536
/** Opcode 0x0f 0x34. */
FNIEMOP_DEF(iemOp_sysenter)
{
    /* SYSENTER - unconditional control transfer, implemented in C. */
    IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
}
3544
/** Opcode 0x0f 0x35. */
FNIEMOP_DEF(iemOp_sysexit)
{
    /* SYSEXIT - the effective operand size decides 32- vs 64-bit return,
       so it is passed down to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
}
3552
3553/** Opcode 0x0f 0x37. */
3554FNIEMOP_STUB(iemOp_getsec);
3555
3556
/** Opcode 0x0f 0x38. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
{
#ifdef IEM_WITH_THREE_0F_38
    /* Three-byte escape: fetch the third opcode byte and dispatch via the
       0f 38 table, indexed by (opcode byte * 4 + active prefix index). */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
3568
3569
/** Opcode 0x0f 0x3a. */
FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
{
#ifdef IEM_WITH_THREE_0F_3A
    /* Three-byte escape: same dispatch scheme as 0f 38, using the 0f 3a table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
#else
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
}
3581
3582
/**
 * Implements a conditional move (CMOVcc Gv,Ev) for all operand sizes and
 * both register and memory sources.
 *
 * Two behaviours worth noting: (1) for a memory source the operand is always
 * fetched - and may therefore fault - even when the condition is false, and
 * (2) in 32-bit operand size a false condition still clears the high half of
 * the destination register (see the IEM_MC_CLEAR_HIGH_GREG_U64 else-branches),
 * matching the architectural zero-extension of 32-bit destinations.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param   a_Cnd       The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
3683
3684
3685
/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    /* CMOVO: copy Ev into Gv when OF=1. */
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}
3692
3693
/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    /* CMOVNO: copy Ev into Gv when OF=0. */
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}
3700
3701
/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    /* CMOVC/CMOVB: copy Ev into Gv when CF=1. */
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}
3708
3709
/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    /* CMOVNC/CMOVAE: copy Ev into Gv when CF=0. */
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}
3716
3717
/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    /* CMOVE/CMOVZ: copy Ev into Gv when ZF=1. */
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}
3724
3725
/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    /* CMOVNE/CMOVNZ: copy Ev into Gv when ZF=0. */
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}
3732
3733
/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    /* CMOVBE: copy Ev into Gv when CF=1 or ZF=1. */
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
3740
3741
/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    /* CMOVNBE/CMOVA: copy Ev into Gv when CF=0 and ZF=0. */
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}
3748
3749
/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    /* CMOVS: copy Ev into Gv when SF=1. */
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}
3756
3757
/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    /* CMOVNS: copy Ev into Gv when SF=0. */
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}
3764
3765
/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    /* CMOVP/CMOVPE: copy Ev into Gv when PF=1. */
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}
3772
3773
/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    /* CMOVNP/CMOVPO: copy Ev into Gv when PF=0. */
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}
3780
3781
/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    /* CMOVL: copy Ev into Gv when SF != OF. */
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}
3788
3789
/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    /* CMOVNL/CMOVGE: copy Ev into Gv when SF == OF. */
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}
3796
3797
/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    /* CMOVLE: copy Ev into Gv when ZF=1 or SF != OF. */
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
3804
3805
/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    /* CMOVNLE/CMOVG: copy Ev into Gv when ZF=0 and SF == OF. */
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
3812
3813#undef CMOV_X
3814
3815/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
3816FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
3817/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
3818FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
3819/* Opcode 0xf3 0x0f 0x50 - invalid */
3820/* Opcode 0xf2 0x0f 0x50 - invalid */
3821
3822/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
3823FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
3824/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
3825FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
3826/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
3827FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
3828/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
3829FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
3830
3831/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
3832FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
3833/* Opcode 0x66 0x0f 0x52 - invalid */
3834/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
3835FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
3836/* Opcode 0xf2 0x0f 0x52 - invalid */
3837
3838/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
3839FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
3840/* Opcode 0x66 0x0f 0x53 - invalid */
3841/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
3842FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
3843/* Opcode 0xf2 0x0f 0x53 - invalid */
3844
3845
/** Opcode 0x0f 0x54 - andps Vps, Wps */
FNIEMOP_DEF(iemOp_andps_Vps_Wps)
{
    /* ANDPS: bitwise AND of 128 bits; reuses the integer pand helper since
       the operation is bit-identical regardless of lane interpretation. */
    IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* NOTE(review): ANDPS is an SSE instruction but dispatches to the SSE2
       common worker - verify the feature gating is intended. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}
3852
3853
/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
{
    /* ANDPD (SSE2): bit-identical to PAND, so the same helper is used. */
    IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}
3860
3861
3862/* Opcode 0xf3 0x0f 0x54 - invalid */
3863/* Opcode 0xf2 0x0f 0x54 - invalid */
3864
3865
/** Opcode 0x0f 0x55 - andnps Vps, Wps */
FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
{
    /* ANDNPS: dst = ~dst & src, via the integer pandn helper. */
    IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* NOTE(review): SSE instruction dispatching to the SSE2 worker - verify. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}
3872
3873
/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
{
    /* ANDNPD (SSE2): dst = ~dst & src, via the integer pandn helper. */
    IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}
3880
3881
3882/* Opcode 0xf3 0x0f 0x55 - invalid */
3883/* Opcode 0xf2 0x0f 0x55 - invalid */
3884
3885
/** Opcode 0x0f 0x56 - orps Vps, Wps */
FNIEMOP_DEF(iemOp_orps_Vps_Wps)
{
    /* ORPS: bitwise OR of 128 bits, via the integer por helper. */
    IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* NOTE(review): SSE instruction dispatching to the SSE2 worker - verify. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}
3892
3893
/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
{
    /* ORPD (SSE2): bitwise OR of 128 bits, via the integer por helper. */
    IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}
3900
3901
3902/* Opcode 0xf3 0x0f 0x56 - invalid */
3903/* Opcode 0xf2 0x0f 0x56 - invalid */
3904
3905
/** Opcode 0x0f 0x57 - xorps Vps, Wps */
FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
{
    /* XORPS: bitwise XOR of 128 bits, via the integer pxor helper. */
    IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    /* NOTE(review): SSE instruction dispatching to the SSE2 worker - verify. */
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}
3912
3913
/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
{
    /* XORPD (SSE2): bitwise XOR of 128 bits, via the integer pxor helper. */
    IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}
3920
3921
3922/* Opcode 0xf3 0x0f 0x57 - invalid */
3923/* Opcode 0xf2 0x0f 0x57 - invalid */
3924
/** Opcode 0x0f 0x58 - addps Vps, Wps */
FNIEMOP_DEF(iemOp_addps_Vps_Wps)
{
    /* ADDPS (SSE): packed single-precision add via the SSE FP worker,
       which handles MXCSR interaction. */
    IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
}
3931
3932
/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
{
    /* ADDPD: packed double-precision add; SSE2-only, hence the SSE2 FP worker. */
    IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
}
3939
3940
3941/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
3942FNIEMOP_STUB(iemOp_addss_Vss_Wss);
3943/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
3944FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);
3945
3946
/** Opcode 0x0f 0x59 - mulps Vps, Wps */
FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
{
    /* MULPS (SSE): packed single-precision multiply via the SSE FP worker. */
    IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
}
3953
3954
/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
{
    /* MULPD: packed double-precision multiply; SSE2-only worker. */
    IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
}
3961
3962
3963/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
3964FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
3965/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
3966FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);
3967
3968/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
3969FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
3970/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
3971FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
3972/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
3973FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
3974/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
3975FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
3976
3977/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
3978FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
3979/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
3980FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
3981/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
3982FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
3983/* Opcode 0xf2 0x0f 0x5b - invalid */
3984
3985
/** Opcode 0x0f 0x5c - subps Vps, Wps */
FNIEMOP_DEF(iemOp_subps_Vps_Wps)
{
    /* SUBPS (SSE): packed single-precision subtract via the SSE FP worker. */
    IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
}
3992
3993
/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
{
    /* SUBPD: packed double-precision subtract; SSE2-only worker. */
    IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
}
4000
4001
4002/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
4003FNIEMOP_STUB(iemOp_subss_Vss_Wss);
4004/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
4005FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);
4006
4007/** Opcode 0x0f 0x5d - minps Vps, Wps */
4008FNIEMOP_STUB(iemOp_minps_Vps_Wps);
4009/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
4010FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
4011/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
4012FNIEMOP_STUB(iemOp_minss_Vss_Wss);
4013/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
4014FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
4015
4016/** Opcode 0x0f 0x5e - divps Vps, Wps */
4017FNIEMOP_STUB(iemOp_divps_Vps_Wps);
4018/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
4019FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
4020/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
4021FNIEMOP_STUB(iemOp_divss_Vss_Wss);
4022/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
4023FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
4024
4025/** Opcode 0x0f 0x5f - maxps Vps, Wps */
4026FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
4027/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
4028FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
4029/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
4030FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
4031/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
4032FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
4033
4034
/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    /* MMX: interleave low bytes of dst and src into the full destination. */
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
}
4041
4042
/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, W */
FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
{
    /* SSE2: interleave low bytes of the XMM operands. */
    IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
}
4049
4050
4051/* Opcode 0xf3 0x0f 0x60 - invalid */
4052
4053
/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    /* MMX: interleave low words of dst and src into the full destination. */
    /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
}
4061
4062
/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
{
    /* SSE2: interleave low words of the XMM operands. */
    IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
}
4069
4070
4071/* Opcode 0xf3 0x0f 0x61 - invalid */
4072
4073
/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    /* MMX: interleave low dwords of dst and src into the full destination. */
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
}
4080
4081
/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
{
    /* SSE2: interleave low dwords of the XMM operands. */
    IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
}
4088
4089
4090/* Opcode 0xf3 0x0f 0x62 - invalid */
4091
4092
4093
/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
{
    /* MMX: pack signed words to signed-saturated bytes. */
    /* NOTE(review): function name says Qq but the mnemonic passes Qd -
       confirm which matches the decoder tables. */
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
}
4100
4101
/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
{
    /* SSE2: pack signed words to signed-saturated bytes. */
    IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
}
4108
4109
4110/* Opcode 0xf3 0x0f 0x63 - invalid */
4111
4112
/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
{
    /* MMX: packed signed byte compare-greater, result is all-1s/all-0s lanes. */
    /* NOTE(review): sibling MMX wrappers also pass DISOPTYPE_MMX - verify the
       flags here are intentional. */
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
}
4119
4120
/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
{
    /* SSE2: packed signed byte compare-greater.
       NOTE(review): sibling SSE wrappers also pass DISOPTYPE_SSE - verify. */
    IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
}
4127
4128
4129/* Opcode 0xf3 0x0f 0x64 - invalid */
4130
4131
/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
{
    /* MMX: packed signed word compare-greater. */
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
}
4138
4139
/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
{
    /* SSE2: packed signed word compare-greater. */
    IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
}
4146
4147
4148/* Opcode 0xf3 0x0f 0x65 - invalid */
4149
4150
/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
{
    /* MMX: packed signed dword compare-greater. */
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
}
4157
4158
/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
{
    /* SSE2: packed signed dword compare-greater. */
    IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
}
4165
4166
4167/* Opcode 0xf3 0x0f 0x66 - invalid */
4168
4169
/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
{
    /* MMX: pack signed words to unsigned-saturated bytes. */
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
}
4176
4177
/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
{
    /* SSE2: pack signed words to unsigned-saturated bytes. */
    IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
}
4184
4185
4186/* Opcode 0xf3 0x0f 0x67 - invalid */
4187
4188
/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
{
    /* MMX: interleave high bytes of dst and src into the full destination. */
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}
4198
4199
/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
{
    /* SSE2: interleave high bytes of the XMM operands. */
    IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
}
4206
4207
4208/* Opcode 0xf3 0x0f 0x68 - invalid */
4209
4210
/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
{
    /* MMX: interleave high words of dst and src into the full destination. */
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
}
4220
4221
/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx
 * @note The Hx operand only exists on the VEX form (vpunpckhwd Vx, Hx, Wx). */
FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
{
    /* SSE2 high-word interleave via the common HighHigh -> Full worker. */
    IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);

}
4229
4230
4231/* Opcode 0xf3 0x0f 0x69 - invalid */
4232
4233
/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
 * @note Intel and AMD both uses Qd for the second parameter, however they
 *       both list it as a mmX/mem64 operand and intel describes it as being
 *       loaded as a qword, so it should be Qq, shouldn't it? */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
{
    /* Interleaves the high dwords; the HighHigh -> Full worker handles decoding. */
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
}
4243
4244
/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
{
    /* SSE2 high-dword interleave via the common HighHigh -> Full worker. */
    IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
}
4251
4252
4253/* Opcode 0xf3 0x0f 0x6a - invalid */
4254
4255
/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
{
    /* MMX signed-saturating dword -> word pack; optimized full,full -> full worker. */
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
}
4262
4263
/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
{
    /* SSE2 signed-saturating dword -> word pack; optimized full,full -> full worker. */
    IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
}
4270
4271
4272/* Opcode 0xf3 0x0f 0x6b - invalid */
4273
4274
4275/* Opcode 0x0f 0x6c - invalid */
4276
4277
/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
{
    /* SSE2 only (no MMX form exists for this opcode); low-qword interleave. */
    IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
}
4284
4285
4286/* Opcode 0xf3 0x0f 0x6c - invalid */
4287/* Opcode 0xf2 0x0f 0x6c - invalid */
4288
4289
4290/* Opcode 0x0f 0x6d - invalid */
4291
4292
/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
{
    /* SSE2 only (no MMX form exists for this opcode); high-qword interleave. */
    IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
}
4299
4300
4301/* Opcode 0xf3 0x0f 0x6d - invalid */
4302
4303
/** Opcode 0x0f 0x6e - movd/movq Pd/Pq, Ed/Eq (MMX).
 * REX.W selects between the dword form (movd, zero-extended into the MMX
 * register) and the qword form (movq).  Both forms switch the FPU into MMX
 * mode after the store. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            /* Effective address must be calculated before decoding completes
               (displacement bytes are part of the instruction). */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Pd_Ey
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* MMX, greg: dword source, zero-extended to 64 bits on fetch. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* MMX, [mem]: dword load, zero-extended on store into the MMX reg. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4411
/** Opcode 0x66 0x0f 0x6e - movd/movq Vd/Vq, Ed/Eq (SSE2).
 * REX.W selects between the dword form (movd) and the qword form (movq);
 * either way the value is zero-extended to fill the full 128-bit register. */
FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            /* Effective address must be calculated before decoding completes
               (displacement bytes are part of the instruction). */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32: dword source, zero-extended to the full 128 bits. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32]: dword load, zero-extended to the full 128 bits. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4515
4516/* Opcode 0xf3 0x0f 0x6e - invalid */
4517
4518
4519/**
4520 * @opcode 0x6f
4521 * @oppfx none
4522 * @opcpuid mmx
4523 * @opgroup og_mmx_datamove
4524 * @opxcpttype 5
4525 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4526 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4527 */
4528FNIEMOP_DEF(iemOp_movq_Pq_Qq)
4529{
4530 IEMOP_MNEMONIC2(RM, MOVD, movd, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4531 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4532 if (IEM_IS_MODRM_REG_MODE(bRm))
4533 {
4534 /*
4535 * Register, register.
4536 */
4537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4538 IEM_MC_BEGIN(0, 1);
4539 IEM_MC_LOCAL(uint64_t, u64Tmp);
4540
4541 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4542 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4543
4544 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
4545 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4546 IEM_MC_FPU_TO_MMX_MODE();
4547
4548 IEM_MC_ADVANCE_RIP();
4549 IEM_MC_END();
4550 }
4551 else
4552 {
4553 /*
4554 * Register, memory.
4555 */
4556 IEM_MC_BEGIN(0, 2);
4557 IEM_MC_LOCAL(uint64_t, u64Tmp);
4558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4559
4560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4562 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4563 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4564
4565 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4566 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4567 IEM_MC_FPU_TO_MMX_MODE();
4568
4569 IEM_MC_ADVANCE_RIP();
4570 IEM_MC_END();
4571 }
4572 return VINF_SUCCESS;
4573}
4574
4575/**
4576 * @opcode 0x6f
4577 * @oppfx 0x66
4578 * @opcpuid sse2
4579 * @opgroup og_sse2_simdint_datamove
4580 * @opxcpttype 1
4581 * @optest op1=1 op2=2 -> op1=2
4582 * @optest op1=0 op2=-42 -> op1=-42
4583 */
4584FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
4585{
4586 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4588 if (IEM_IS_MODRM_REG_MODE(bRm))
4589 {
4590 /*
4591 * Register, register.
4592 */
4593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4594 IEM_MC_BEGIN(0, 0);
4595
4596 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4597 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4598
4599 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4600 IEM_GET_MODRM_RM(pVCpu, bRm));
4601 IEM_MC_ADVANCE_RIP();
4602 IEM_MC_END();
4603 }
4604 else
4605 {
4606 /*
4607 * Register, memory.
4608 */
4609 IEM_MC_BEGIN(0, 2);
4610 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4612
4613 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4615 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4616 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4617
4618 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4619 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4620
4621 IEM_MC_ADVANCE_RIP();
4622 IEM_MC_END();
4623 }
4624 return VINF_SUCCESS;
4625}
4626
4627/**
4628 * @opcode 0x6f
4629 * @oppfx 0xf3
4630 * @opcpuid sse2
4631 * @opgroup og_sse2_simdint_datamove
4632 * @opxcpttype 4UA
4633 * @optest op1=1 op2=2 -> op1=2
4634 * @optest op1=0 op2=-42 -> op1=-42
4635 */
4636FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
4637{
4638 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4640 if (IEM_IS_MODRM_REG_MODE(bRm))
4641 {
4642 /*
4643 * Register, register.
4644 */
4645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4646 IEM_MC_BEGIN(0, 0);
4647 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4648 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4649 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4650 IEM_GET_MODRM_RM(pVCpu, bRm));
4651 IEM_MC_ADVANCE_RIP();
4652 IEM_MC_END();
4653 }
4654 else
4655 {
4656 /*
4657 * Register, memory.
4658 */
4659 IEM_MC_BEGIN(0, 2);
4660 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4662
4663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4665 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4666 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4667 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4668 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4669
4670 IEM_MC_ADVANCE_RIP();
4671 IEM_MC_END();
4672 }
4673 return VINF_SUCCESS;
4674}
4675
4676
/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib
 * Requires SSE or the AMD MMX extensions (see the CHECK_SSE_OR_MMXEXT raise). */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,    pSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,       bEvilArg, /*=*/ bEvil, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        /* The imm8 follows the modrm/displacement bytes, so it must be
           fetched after the effective address has been calculated. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
        IEM_MC_ARG_CONST(uint8_t,               bEvilArg, /*=*/ bEvil, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4733
4734
4735/**
4736 * Common worker for SSE2 instructions on the forms:
4737 * pshufd xmm1, xmm2/mem128, imm8
4738 * pshufhw xmm1, xmm2/mem128, imm8
4739 * pshuflw xmm1, xmm2/mem128, imm8
4740 *
4741 * Proper alignment of the 128-bit operand is enforced.
4742 * Exceptions type 4. SSE2 cpuid checks.
4743 */
4744FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
4745{
4746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4747 if (IEM_IS_MODRM_REG_MODE(bRm))
4748 {
4749 /*
4750 * Register, register.
4751 */
4752 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4754
4755 IEM_MC_BEGIN(3, 0);
4756 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4757 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4758 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4759 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4760 IEM_MC_PREPARE_SSE_USAGE();
4761 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4762 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4763 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
4764 IEM_MC_ADVANCE_RIP();
4765 IEM_MC_END();
4766 }
4767 else
4768 {
4769 /*
4770 * Register, memory.
4771 */
4772 IEM_MC_BEGIN(3, 2);
4773 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4774 IEM_MC_LOCAL(RTUINT128U, uSrc);
4775 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
4776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4777
4778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4779 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4780 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4782 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4783
4784 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4785 IEM_MC_PREPARE_SSE_USAGE();
4786 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4787 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
4788
4789 IEM_MC_ADVANCE_RIP();
4790 IEM_MC_END();
4791 }
4792 return VINF_SUCCESS;
4793}
4794
4795
/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
{
    /* Delegates to the common pshufXX worker with the dword-shuffle impl. */
    IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
}
4802
4803
/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
{
    /* Delegates to the common pshufXX worker with the high-word-shuffle impl. */
    IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
}
4810
4811
/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
{
    /* Delegates to the common pshufXX worker with the low-word-shuffle impl. */
    IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
}
4818
4819
4820/**
4821 * Common worker for MMX instructions of the form:
4822 * psrlw mm, imm8
4823 * psraw mm, imm8
4824 * psllw mm, imm8
4825 * psrld mm, imm8
4826 * psrad mm, imm8
4827 * pslld mm, imm8
4828 * psrlq mm, imm8
4829 * psllq mm, imm8
4830 *
4831 */
4832FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
4833{
4834 if (IEM_IS_MODRM_REG_MODE(bRm))
4835 {
4836 /*
4837 * Register, immediate.
4838 */
4839 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4841
4842 IEM_MC_BEGIN(2, 0);
4843 IEM_MC_ARG(uint64_t *, pDst, 0);
4844 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
4845 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4846 IEM_MC_PREPARE_FPU_USAGE();
4847 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4848 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
4849 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4850 IEM_MC_FPU_TO_MMX_MODE();
4851 IEM_MC_ADVANCE_RIP();
4852 IEM_MC_END();
4853 }
4854 else
4855 {
4856 /*
4857 * Register, memory not supported.
4858 */
4859 /// @todo Caller already enforced register mode?!
4860 }
4861 return VINF_SUCCESS;
4862}
4863
4864
4865/**
4866 * Common worker for SSE2 instructions of the form:
4867 * psrlw xmm, imm8
4868 * psraw xmm, imm8
4869 * psllw xmm, imm8
4870 * psrld xmm, imm8
4871 * psrad xmm, imm8
4872 * pslld xmm, imm8
4873 * psrlq xmm, imm8
4874 * psllq xmm, imm8
4875 *
4876 */
4877FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
4878{
4879 if (IEM_IS_MODRM_REG_MODE(bRm))
4880 {
4881 /*
4882 * Register, immediate.
4883 */
4884 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4886
4887 IEM_MC_BEGIN(2, 0);
4888 IEM_MC_ARG(PRTUINT128U, pDst, 0);
4889 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
4890 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4891 IEM_MC_PREPARE_SSE_USAGE();
4892 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4893 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
4894 IEM_MC_ADVANCE_RIP();
4895 IEM_MC_END();
4896 }
4897 else
4898 {
4899 /*
4900 * Register, memory.
4901 */
4902 /// @todo Caller already enforced register mode?!
4903 }
4904 return VINF_SUCCESS;
4905}
4906
4907
/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib (MMX logical right word shift). */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
}
4914
4915
/** Opcode 0x66 0x0f 0x71 11/2 - psrlw Ux, Ib (SSE2 logical right word shift). */
FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
}
4922
4923
/** Opcode 0x0f 0x71 11/4 - psraw Nq, Ib (MMX arithmetic right word shift). */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
}
4930
4931
/** Opcode 0x66 0x0f 0x71 11/4 - psraw Ux, Ib (SSE2 arithmetic right word shift). */
FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
}
4938
4939
/** Opcode 0x0f 0x71 11/6 - psllw Nq, Ib (MMX left word shift). */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
}
4946
4947
/** Opcode 0x66 0x0f 0x71 11/6 - psllw Ux, Ib (SSE2 left word shift). */
FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
}
4954
4955
4956/**
4957 * Group 12 jump table for register variant.
4958 */
4959IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
4960{
4961 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4962 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4963 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4964 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4965 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4966 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4967 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4968 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4969};
4970AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
4971
4972
/** Opcode 0x0f 0x71 - Group 12 (word shifts by immediate). */
FNIEMOP_DEF(iemOp_Grp12)
{
    /* Only register forms are valid; memory forms fall through to the
       invalid handler, which still consumes the trailing imm8. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
4983
4984
/** Opcode 0x0f 0x72 11/2 - psrld Nq, Ib (MMX logical right dword shift). */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
}
4991
4992
/** Opcode 0x66 0x0f 0x72 11/2 - psrld Ux, Ib (SSE2 logical right dword shift). */
FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
}
4999
5000
/** Opcode 0x0f 0x72 11/4 - psrad Nq, Ib (MMX arithmetic right dword shift). */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
}
5007
5008
/** Opcode 0x66 0x0f 0x72 11/4 - psrad Ux, Ib (SSE2 arithmetic right dword shift). */
FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
}
5015
5016
/** Opcode 0x0f 0x72 11/6 - pslld Nq, Ib (MMX left dword shift). */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
}
5023
/** Opcode 0x66 0x0f 0x72 11/6 - pslld Ux, Ib (SSE2 left dword shift). */
FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
}
5030
5031
5032/**
5033 * Group 13 jump table for register variant.
5034 */
5035IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
5036{
5037 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5038 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5039 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5040 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5041 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5042 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5043 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5044 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
5045};
5046AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
5047
/** Opcode 0x0f 0x72 - Group 13 (dword shifts by immediate). */
FNIEMOP_DEF(iemOp_Grp13)
{
    /* Only register forms are valid; memory forms fall through to the
       invalid handler, which still consumes the trailing imm8. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
5058
5059
/** Opcode 0x0f 0x73 11/2 - psrlq Nq, Ib (MMX logical right qword shift). */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}
5066
5067
/** Opcode 0x66 0x0f 0x73 11/2 - psrlq Ux, Ib (SSE2 logical right qword shift). */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
}
5074
5075
/** Opcode 0x66 0x0f 0x73 11/3 - psrldq Ux, Ib (SSE2 byte-wise right shift; no MMX form). */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
}
5082
5083
/** Opcode 0x0f 0x73 11/6 - psllq Nq, Ib (MMX left qword shift). */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}
5090
5091
/** Opcode 0x66 0x0f 0x73 11/6 - psllq Ux, Ib (SSE2 left qword shift). */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
}
5098
5099
/** Opcode 0x66 0x0f 0x73 11/7 - pslldq Ux, Ib (SSE2 byte-wise left shift; no MMX form). */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
}
5106
5107/**
5108 * Group 14 jump table for register variant.
5109 */
5110IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
5111{
5112 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5113 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5114 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5115 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5116 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5117 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5118 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5119 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5120};
5121AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
5122
5123
/** Opcode 0x0f 0x73 - Group 14 (qword and double-qword shifts by immediate). */
FNIEMOP_DEF(iemOp_Grp14)
{
    /* Only register forms are valid; memory forms fall through to the
       invalid handler, which still consumes the trailing imm8. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
5134
5135
/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    /* MMX byte-equality compare via the common full64,full64 -> full64 worker. */
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}
5142
5143
/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    /* SSE2 byte-equality compare via the common full128,full128 -> full128 worker. */
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}
5150
5151
5152/* Opcode 0xf3 0x0f 0x74 - invalid */
5153/* Opcode 0xf2 0x0f 0x74 - invalid */
5154
5155
/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    /* MMX word-equality compare via the common full64,full64 -> full64 worker. */
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}
5162
5163
/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    /* SSE2 word-equality compare via the common full128,full128 -> full128 worker. */
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
}
5170
5171
5172/* Opcode 0xf3 0x0f 0x75 - invalid */
5173/* Opcode 0xf2 0x0f 0x75 - invalid */
5174
5175
/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    /* MMX dword-equality compare via the common full64,full64 -> full64 worker. */
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
}
5182
5183
/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    /* SSE2 dword-equality compare via the common full128,full128 -> full128 worker. */
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
}
5190
5191
5192/* Opcode 0xf3 0x0f 0x76 - invalid */
5193/* Opcode 0xf2 0x0f 0x76 - invalid */
5194
5195
/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here)
 * Leaves MMX mode (FPU_FROM_MMX_MODE); raises \#NM for CR0.EM/TS and \#MF
 * for pending x87 exceptions first, like other FPU instructions. */
FNIEMOP_DEF(iemOp_emms)
{
    IEMOP_MNEMONIC(emms, "emms");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5211
5212/* Opcode 0x66 0x0f 0x77 - invalid */
5213/* Opcode 0xf3 0x0f 0x77 - invalid */
5214/* Opcode 0xf2 0x0f 0x77 - invalid */
5215
/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    /* VMREAD operates on 64-bit quantities in long mode and 32-bit ones otherwise;
       the operand-size prefix does not apply (and is rejected during decoding below). */
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            /* ModRM.reg holds the VMCS field encoding, ModRM.rm the destination. */
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            /* Effective address must be calculated before decoding is completed. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
#endif
5290
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);  /* AMD-specific (EXTRQ et al.) - not implemented by IEM yet. */
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */
5295
/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    /* Like VMREAD: 64-bit operands in long mode, 32-bit otherwise; the
       operand-size prefix is rejected during decoding below. */
    IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            /* ModRM.rm supplies the value, ModRM.reg the VMCS field encoding. */
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            /* Note: the same C worker handles both sizes for the register form. */
            IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            /* Effective address must be calculated before decoding is completed. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
#else
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
#endif
5370/* Opcode 0x66 0x0f 0x79 - invalid */
5371/* Opcode 0xf3 0x0f 0x79 - invalid */
5372/* Opcode 0xf2 0x0f 0x79 - invalid */
5373
5374/* Opcode 0x0f 0x7a - invalid */
5375/* Opcode 0x66 0x0f 0x7a - invalid */
5376/* Opcode 0xf3 0x0f 0x7a - invalid */
5377/* Opcode 0xf2 0x0f 0x7a - invalid */
5378
5379/* Opcode 0x0f 0x7b - invalid */
5380/* Opcode 0x66 0x0f 0x7b - invalid */
5381/* Opcode 0xf3 0x0f 0x7b - invalid */
5382/* Opcode 0xf2 0x0f 0x7b - invalid */
5383
/* Opcode 0x0f 0x7c - invalid */
/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);     /* Horizontal add (packed double) - not implemented yet. */
/* Opcode 0xf3 0x0f 0x7c - invalid */
/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
FNIEMOP_STUB(iemOp_haddps_Vps_Wps);     /* Horizontal add (packed single) - not implemented yet. */

/* Opcode 0x0f 0x7d - invalid */
/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);     /* Horizontal subtract (packed double) - not implemented yet. */
/* Opcode 0xf3 0x0f 0x7d - invalid */
/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);     /* Horizontal subtract (packed single) - not implemented yet. */
5397
5398
/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    /* The REX.W prefix selects between the dword (movd) and qword (movq) store forms. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            /* FOR_CHANGE since executing an MMX instruction updates the x87 tag word (see FPU_TO_MMX_MODE). */
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Ey_Pd
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;

}
5508
5509
/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy (SSE2 store form). */
FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    /* The REX.W prefix selects between the dword (movd) and qword (movq) store forms. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            /* Only reads the XMM register, hence FOR_READ. */
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Ey_Vy
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;

}
5614
5615/**
5616 * @opcode 0x7e
5617 * @oppfx 0xf3
5618 * @opcpuid sse2
5619 * @opgroup og_sse2_pcksclr_datamove
5620 * @opxcpttype none
5621 * @optest op1=1 op2=2 -> op1=2
5622 * @optest op1=0 op2=-42 -> op1=-42
5623 */
5624FNIEMOP_DEF(iemOp_movq_Vq_Wq)
5625{
5626 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5628 if (IEM_IS_MODRM_REG_MODE(bRm))
5629 {
5630 /*
5631 * Register, register.
5632 */
5633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5634 IEM_MC_BEGIN(0, 2);
5635 IEM_MC_LOCAL(uint64_t, uSrc);
5636
5637 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5638 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5639
5640 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5641 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5642
5643 IEM_MC_ADVANCE_RIP();
5644 IEM_MC_END();
5645 }
5646 else
5647 {
5648 /*
5649 * Memory, register.
5650 */
5651 IEM_MC_BEGIN(0, 2);
5652 IEM_MC_LOCAL(uint64_t, uSrc);
5653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5654
5655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5657 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5658 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5659
5660 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5661 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5662
5663 IEM_MC_ADVANCE_RIP();
5664 IEM_MC_END();
5665 }
5666 return VINF_SUCCESS;
5667}
5668
5669/* Opcode 0xf2 0x0f 0x7e - invalid */
5670
5671
/** Opcode 0x0f 0x7f - movq Qq, Pq */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    /* MMX store form: copies an MMX register to another MMX register or to memory. */
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* FOR_CHANGE: the destination MMX register and the x87 tag word are modified. */
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, Register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5718
/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    /* Aligned 128-bit store form; the memory variant uses the alignment-checking store. */
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Only reads the XMM register here; the destination is memory. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5760
/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    /* Unaligned 128-bit store form; identical to movdqa except the memory store
       imposes no alignment requirement. */
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5802
5803/* Opcode 0xf2 0x0f 0x7f - invalid */
5804
5805
5806
/** Opcode 0x0f 0x80 - jo Jv. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when OF=1; effective operand size selects rel16 vs rel32 displacement. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5841
5842
/** Opcode 0x0f 0x81 - jno Jv. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when OF=0 (condition inverted: fall through on OF=1). */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5877
5878
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when CF=1. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5913
5914
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when CF=0 (condition inverted: fall through on CF=1). */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5949
5950
/** Opcode 0x0f 0x84 - je/jz Jv. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when ZF=1. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5985
5986
/** Opcode 0x0f 0x85 - jne/jnz Jv. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when ZF=0 (condition inverted: fall through on ZF=1). */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6021
6022
/** Opcode 0x0f 0x86 - jbe/jna Jv. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when CF=1 or ZF=1 (unsigned below-or-equal). */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6057
6058
/** Opcode 0x0f 0x87 - jnbe/ja Jv. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when CF=0 and ZF=0 (unsigned above); condition inverted below. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6093
6094
/** Opcode 0x0f 0x88 - js Jv. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when SF=1. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6129
6130
/** Opcode 0x0f 0x89 - jns Jv. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when SF=0 (condition inverted: fall through on SF=1). */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6165
6166
/** Opcode 0x0f 0x8a - jp Jv. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when PF=1. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6201
6202
/** Opcode 0x0f 0x8b - jnp Jv. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when PF=0 (condition inverted: fall through on PF=1). */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6237
6238
/** Opcode 0x0f 0x8c - jl/jnge Jv. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when SF != OF (signed less-than). */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6273
6274
/** Opcode 0x0f 0x8d - jnl/jge Jv. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when SF == OF (signed greater-or-equal); condition inverted below. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6309
6310
/** Opcode 0x0f 0x8e - jle/jng Jv. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when ZF=1 or SF != OF (signed less-or-equal). */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6345
6346
/** Opcode 0x0f 0x8f - jnle/jg Jv. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Branch taken when ZF=0 and SF == OF (signed greater-than); condition inverted below. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6381
6382
/** Opcode 0x0f 0x90 - seto Eb: store 1 in the byte operand if OF=1, else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1); /* condition true */
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0); /* condition false */
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6423
6424
/** Opcode 0x0f 0x91 - setno Eb: store 1 in the byte operand if OF=0, else 0. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0); /* inverted condition */
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6465
6466
/** Opcode 0x0f 0x92 - setc/setb/setnae Eb: store 1 in the byte operand if CF=1, else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6507
6508
/** Opcode 0x0f 0x93 - setnc/setnb/setae Eb: store 1 in the byte operand if CF=0, else 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0); /* inverted condition */
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6549
6550
/** Opcode 0x0f 0x94 - sete/setz Eb: store 1 in the byte operand if ZF=1, else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6591
6592
/** Opcode 0x0f 0x95 - setne/setnz Eb: store 1 in the byte operand if ZF=0, else 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0); /* inverted condition */
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6633
6634
/** Opcode 0x0f 0x96 - setbe/setna Eb: store 1 in the byte operand if CF=1 or ZF=1, else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6675
6676
/** Opcode 0x0f 0x97 - setnbe/seta Eb: store 1 in the byte operand if CF=0 and ZF=0, else 0. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0); /* inverted condition */
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6717
6718
/** Opcode 0x0f 0x98 - sets Eb: store 1 in the byte operand if SF=1, else 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6759
6760
/** Opcode 0x0f 0x99 - setns Eb: store 1 in the byte operand if SF=0, else 0. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0); /* inverted condition */
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6801
6802
/** Opcode 0x0f 0x9a - setp/setpe Eb: store 1 in the byte operand if PF=1, else 0. */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6843
6844
/** Opcode 0x0f 0x9b - setnp/setpo Eb: store 1 in the byte operand if PF=0, else 0. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0); /* inverted condition */
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6885
6886
/** Opcode 0x0f 0x9c - setl/setnge Eb: store 1 in the byte operand if SF<>OF, else 0. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6927
6928
/** Opcode 0x0f 0x9d - setnl/setge Eb: store 1 in the byte operand if SF=OF, else 0. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0); /* inverted condition */
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6969
6970
/** Opcode 0x0f 0x9e - setle/setng Eb: store 1 in the byte operand if ZF=1 or SF<>OF, else 0. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7011
7012
/** Opcode 0x0f 0x9f - setnle/setg Eb: store 1 in the byte operand if ZF=0 and SF=OF, else 0. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386(); /* SETcc requires a 386 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0); /* inverted condition */
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7053
7054
7055/**
7056 * Common 'push segment-register' helper.
7057 */
7058FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
7059{
7060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7061 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT);
7062 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7063
7064 switch (pVCpu->iem.s.enmEffOpSize)
7065 {
7066 case IEMMODE_16BIT:
7067 IEM_MC_BEGIN(0, 1);
7068 IEM_MC_LOCAL(uint16_t, u16Value);
7069 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
7070 IEM_MC_PUSH_U16(u16Value);
7071 IEM_MC_ADVANCE_RIP();
7072 IEM_MC_END();
7073 break;
7074
7075 case IEMMODE_32BIT:
7076 IEM_MC_BEGIN(0, 1);
7077 IEM_MC_LOCAL(uint32_t, u32Value);
7078 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
7079 IEM_MC_PUSH_U32_SREG(u32Value);
7080 IEM_MC_ADVANCE_RIP();
7081 IEM_MC_END();
7082 break;
7083
7084 case IEMMODE_64BIT:
7085 IEM_MC_BEGIN(0, 1);
7086 IEM_MC_LOCAL(uint64_t, u64Value);
7087 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
7088 IEM_MC_PUSH_U64(u64Value);
7089 IEM_MC_ADVANCE_RIP();
7090 IEM_MC_END();
7091 break;
7092 }
7093
7094 return VINF_SUCCESS;
7095}
7096
7097
7098/** Opcode 0x0f 0xa0. */
7099FNIEMOP_DEF(iemOp_push_fs)
7100{
7101 IEMOP_MNEMONIC(push_fs, "push fs");
7102 IEMOP_HLP_MIN_386();
7103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7104 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
7105}
7106
7107
/** Opcode 0x0f 0xa1 - pop fs. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386(); /* 0x0f 0xa1 requires a 386+. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation (iemCImpl_pop_Sreg) together with
       the current effective operand size. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
7116
7117
/** Opcode 0x0f 0xa2 - cpuid. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* All the leaf lookup work happens in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
7126
7127
/**
 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
 * iemOp_bts_Ev_Gv.
 *
 * Decodes and emulates the BT/BTS/BTR/BTC Ev,Gv forms, where the bit offset
 * comes from a general register.  For a register destination the offset is
 * simply masked to the operand width (0xf/0x1f/0x3f).  For a memory
 * destination the offset is a *signed* bit index relative to the effective
 * address, so the address is first adjusted by (offset >> log2(width)) *
 * (width/8) bytes and the remaining low bits select the bit inside that
 * word/dword/qword.
 *
 * @param   pImpl   The operation implementation table.  A NULL pfnLockedU16
 *                  identifies BT: read-only memory access and no LOCK prefix
 *                  allowed; the other three are read-modify-write and accept
 *                  LOCK.
 */
FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Only CF is defined after BTx; the rest are architecturally undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_AND_LOCAL_U16(u16Src, 0xf); /* bit offset modulo operand width */
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit ops zero the upper half */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT (no locked variant) only reads; BTS/BTR/BTC read-modify-write. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int16_t, i16AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                /* BT rejects the LOCK prefix; the writable forms accept it. */
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* Adjust the address by the signed word index of the bit
                   offset: (i16 >> 4) words = *2 bytes; the low 4 bits then
                   select the bit within that word. */
                IEM_MC_ASSIGN(i16AddrAdj, u16Src);
                IEM_MC_AND_ARG_U16(u16Src, 0x0f);
                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
                IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
                IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, i32AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* (i32 >> 5) dwords = *4 bytes; low 5 bits select the bit. */
                IEM_MC_ASSIGN(i32AddrAdj, u32Src);
                IEM_MC_AND_ARG_U32(u32Src, 0x1f);
                IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
                IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
                IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int64_t, i64AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* (i64 >> 6) qwords = *8 bytes; low 6 bits select the bit. */
                IEM_MC_ASSIGN(i64AddrAdj, u64Src);
                IEM_MC_AND_ARG_U64(u64Src, 0x3f);
                IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
                IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
                IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7311
7312
/** Opcode 0x0f 0xa3 - bt Ev,Gv (bit test; CF = selected bit). */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386(); /* BT requires a 386+. */
    /* g_iemAImpl_bt has no locked variant, which also makes the common
       worker treat a memory destination as read-only and reject LOCK. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}
7320
7321
7322/**
7323 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
7324 */
7325FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
7326{
7327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7328 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7329
7330 if (IEM_IS_MODRM_REG_MODE(bRm))
7331 {
7332 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7334
7335 switch (pVCpu->iem.s.enmEffOpSize)
7336 {
7337 case IEMMODE_16BIT:
7338 IEM_MC_BEGIN(4, 0);
7339 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7340 IEM_MC_ARG(uint16_t, u16Src, 1);
7341 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7342 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7343
7344 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7345 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7346 IEM_MC_REF_EFLAGS(pEFlags);
7347 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7348
7349 IEM_MC_ADVANCE_RIP();
7350 IEM_MC_END();
7351 return VINF_SUCCESS;
7352
7353 case IEMMODE_32BIT:
7354 IEM_MC_BEGIN(4, 0);
7355 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7356 IEM_MC_ARG(uint32_t, u32Src, 1);
7357 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7358 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7359
7360 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7361 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7362 IEM_MC_REF_EFLAGS(pEFlags);
7363 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7364
7365 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7366 IEM_MC_ADVANCE_RIP();
7367 IEM_MC_END();
7368 return VINF_SUCCESS;
7369
7370 case IEMMODE_64BIT:
7371 IEM_MC_BEGIN(4, 0);
7372 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7373 IEM_MC_ARG(uint64_t, u64Src, 1);
7374 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7375 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7376
7377 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7378 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7379 IEM_MC_REF_EFLAGS(pEFlags);
7380 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7381
7382 IEM_MC_ADVANCE_RIP();
7383 IEM_MC_END();
7384 return VINF_SUCCESS;
7385
7386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7387 }
7388 }
7389 else
7390 {
7391 switch (pVCpu->iem.s.enmEffOpSize)
7392 {
7393 case IEMMODE_16BIT:
7394 IEM_MC_BEGIN(4, 2);
7395 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7396 IEM_MC_ARG(uint16_t, u16Src, 1);
7397 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7398 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7399 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7400
7401 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7402 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7403 IEM_MC_ASSIGN(cShiftArg, cShift);
7404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7405 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7406 IEM_MC_FETCH_EFLAGS(EFlags);
7407 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7408 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7409
7410 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7411 IEM_MC_COMMIT_EFLAGS(EFlags);
7412 IEM_MC_ADVANCE_RIP();
7413 IEM_MC_END();
7414 return VINF_SUCCESS;
7415
7416 case IEMMODE_32BIT:
7417 IEM_MC_BEGIN(4, 2);
7418 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7419 IEM_MC_ARG(uint32_t, u32Src, 1);
7420 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7421 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7422 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7423
7424 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7425 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7426 IEM_MC_ASSIGN(cShiftArg, cShift);
7427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7428 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7429 IEM_MC_FETCH_EFLAGS(EFlags);
7430 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7431 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7432
7433 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7434 IEM_MC_COMMIT_EFLAGS(EFlags);
7435 IEM_MC_ADVANCE_RIP();
7436 IEM_MC_END();
7437 return VINF_SUCCESS;
7438
7439 case IEMMODE_64BIT:
7440 IEM_MC_BEGIN(4, 2);
7441 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7442 IEM_MC_ARG(uint64_t, u64Src, 1);
7443 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7444 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7445 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7446
7447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7448 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7449 IEM_MC_ASSIGN(cShiftArg, cShift);
7450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7451 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7452 IEM_MC_FETCH_EFLAGS(EFlags);
7453 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7454 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7455
7456 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7457 IEM_MC_COMMIT_EFLAGS(EFlags);
7458 IEM_MC_ADVANCE_RIP();
7459 IEM_MC_END();
7460 return VINF_SUCCESS;
7461
7462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7463 }
7464 }
7465}
7466
7467
7468/**
7469 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
7470 */
7471FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
7472{
7473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7474 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7475
7476 if (IEM_IS_MODRM_REG_MODE(bRm))
7477 {
7478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7479
7480 switch (pVCpu->iem.s.enmEffOpSize)
7481 {
7482 case IEMMODE_16BIT:
7483 IEM_MC_BEGIN(4, 0);
7484 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7485 IEM_MC_ARG(uint16_t, u16Src, 1);
7486 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7487 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7488
7489 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7490 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7491 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7492 IEM_MC_REF_EFLAGS(pEFlags);
7493 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7494
7495 IEM_MC_ADVANCE_RIP();
7496 IEM_MC_END();
7497 return VINF_SUCCESS;
7498
7499 case IEMMODE_32BIT:
7500 IEM_MC_BEGIN(4, 0);
7501 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7502 IEM_MC_ARG(uint32_t, u32Src, 1);
7503 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7504 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7505
7506 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7507 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7508 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7509 IEM_MC_REF_EFLAGS(pEFlags);
7510 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7511
7512 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7513 IEM_MC_ADVANCE_RIP();
7514 IEM_MC_END();
7515 return VINF_SUCCESS;
7516
7517 case IEMMODE_64BIT:
7518 IEM_MC_BEGIN(4, 0);
7519 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7520 IEM_MC_ARG(uint64_t, u64Src, 1);
7521 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7522 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7523
7524 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7525 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7526 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7527 IEM_MC_REF_EFLAGS(pEFlags);
7528 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7529
7530 IEM_MC_ADVANCE_RIP();
7531 IEM_MC_END();
7532 return VINF_SUCCESS;
7533
7534 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7535 }
7536 }
7537 else
7538 {
7539 switch (pVCpu->iem.s.enmEffOpSize)
7540 {
7541 case IEMMODE_16BIT:
7542 IEM_MC_BEGIN(4, 2);
7543 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7544 IEM_MC_ARG(uint16_t, u16Src, 1);
7545 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7546 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7548
7549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7551 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7552 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7553 IEM_MC_FETCH_EFLAGS(EFlags);
7554 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7555 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7556
7557 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7558 IEM_MC_COMMIT_EFLAGS(EFlags);
7559 IEM_MC_ADVANCE_RIP();
7560 IEM_MC_END();
7561 return VINF_SUCCESS;
7562
7563 case IEMMODE_32BIT:
7564 IEM_MC_BEGIN(4, 2);
7565 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7566 IEM_MC_ARG(uint32_t, u32Src, 1);
7567 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7568 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7570
7571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7573 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7574 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7575 IEM_MC_FETCH_EFLAGS(EFlags);
7576 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7577 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7578
7579 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7580 IEM_MC_COMMIT_EFLAGS(EFlags);
7581 IEM_MC_ADVANCE_RIP();
7582 IEM_MC_END();
7583 return VINF_SUCCESS;
7584
7585 case IEMMODE_64BIT:
7586 IEM_MC_BEGIN(4, 2);
7587 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7588 IEM_MC_ARG(uint64_t, u64Src, 1);
7589 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7590 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7592
7593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7595 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7596 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7597 IEM_MC_FETCH_EFLAGS(EFlags);
7598 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7599 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7600
7601 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7602 IEM_MC_COMMIT_EFLAGS(EFlags);
7603 IEM_MC_ADVANCE_RIP();
7604 IEM_MC_END();
7605 return VINF_SUCCESS;
7606
7607 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7608 }
7609 }
7610}
7611
7612
7613
/** Opcode 0x0f 0xa4.  SHLD Ev,Gv,Ib - double precision shift left,
 *  immediate count; delegates to the common Ib worker with the
 *  CPU-behavior-selected SHLD flag implementation. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
7621
7622
/** Opcode 0x0f 0xa5.  SHLD Ev,Gv,CL - double precision shift left,
 *  count in CL; delegates to the common CL worker. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
}
7630
7631
/** Opcode 0x0f 0xa8.  PUSH GS - pushes the GS segment selector;
 *  shares the generic segment-register push worker. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386(); /* FS/GS only exist from the 80386 on. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
7640
7641
/** Opcode 0x0f 0xa9.  POP GS - loads GS from the stack; deferred to a
 *  C implementation since segment loading may fault / reload descriptors. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386(); /* FS/GS only exist from the 80386 on. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
7650
7651
/** Opcode 0x0f 0xaa.  RSM - resume from system management mode;
 *  fully deferred to the C implementation. */
FNIEMOP_DEF(iemOp_rsm)
{
    IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
    IEMOP_HLP_MIN_386(); /* 386SL and later. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
}
7660
7661
7662
/** Opcode 0x0f 0xab.  BTS Ev,Gv - bit test and set; copies the selected
 *  bit into CF and then sets it.  Shares the common bit-op worker. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
7670
7671
/** Opcode 0x0f 0xac.  SHRD Ev,Gv,Ib - double precision shift right,
 *  immediate count; delegates to the common Ib worker. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
7679
7680
/** Opcode 0x0f 0xad.  SHRD Ev,Gv,CL - double precision shift right,
 *  count in CL; delegates to the common CL worker. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386(); /* Instruction introduced with the 80386. */
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
}
7688
7689
/** Opcode 0x0f 0xae mem/0.  FXSAVE m512 - saves the x87/MMX/SSE state to
 *  memory.  Raises \#UD if the guest CPU lacks FXSAVE/FXRSTOR support;
 *  the heavy lifting is deferred to iemCImpl_fxsave. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Saving only reads the FPU state, so actualize-for-read suffices. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7709
7710
/** Opcode 0x0f 0xae mem/1.  FXRSTOR m512 - restores the x87/MMX/SSE state
 *  from memory.  Raises \#UD if the guest CPU lacks FXSAVE/FXRSTOR support;
 *  the heavy lifting is deferred to iemCImpl_fxrstor. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Restoring rewrites the FPU state, hence actualize-for-change. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7730
7731
7732/**
7733 * @opmaps grp15
7734 * @opcode !11/2
7735 * @oppfx none
7736 * @opcpuid sse
7737 * @opgroup og_sse_mxcsrsm
7738 * @opxcpttype 5
7739 * @optest op1=0 -> mxcsr=0
7740 * @optest op1=0x2083 -> mxcsr=0x2083
7741 * @optest op1=0xfffffffe -> value.xcpt=0xd
7742 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
7743 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
7744 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
7745 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
7746 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
7747 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
7748 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
7749 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
7750 */
7751FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
7752{
7753 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7754 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7755 return IEMOP_RAISE_INVALID_OPCODE();
7756
7757 IEM_MC_BEGIN(2, 0);
7758 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7759 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7762 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7763 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7764 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
7765 IEM_MC_END();
7766 return VINF_SUCCESS;
7767}
7768
7769
7770/**
7771 * @opmaps grp15
7772 * @opcode !11/3
7773 * @oppfx none
7774 * @opcpuid sse
7775 * @opgroup og_sse_mxcsrsm
7776 * @opxcpttype 5
7777 * @optest mxcsr=0 -> op1=0
7778 * @optest mxcsr=0x2083 -> op1=0x2083
7779 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
7780 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
7781 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
7782 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
7783 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
7784 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
7785 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
7786 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
7787 */
7788FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
7789{
7790 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7791 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7792 return IEMOP_RAISE_INVALID_OPCODE();
7793
7794 IEM_MC_BEGIN(2, 0);
7795 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7796 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7799 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7800 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7801 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
7802 IEM_MC_END();
7803 return VINF_SUCCESS;
7804}
7805
7806
7807/**
7808 * @opmaps grp15
7809 * @opcode !11/4
7810 * @oppfx none
7811 * @opcpuid xsave
7812 * @opgroup og_system
7813 * @opxcpttype none
7814 */
7815FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
7816{
7817 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
7818 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7819 return IEMOP_RAISE_INVALID_OPCODE();
7820
7821 IEM_MC_BEGIN(3, 0);
7822 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7823 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7824 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7827 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7828 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7829 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
7830 IEM_MC_END();
7831 return VINF_SUCCESS;
7832}
7833
7834
7835/**
7836 * @opmaps grp15
7837 * @opcode !11/5
7838 * @oppfx none
7839 * @opcpuid xsave
7840 * @opgroup og_system
7841 * @opxcpttype none
7842 */
7843FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
7844{
7845 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
7846 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
7847 return IEMOP_RAISE_INVALID_OPCODE();
7848
7849 IEM_MC_BEGIN(3, 0);
7850 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7851 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7852 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7855 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7856 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7857 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7858 IEM_MC_END();
7859 return VINF_SUCCESS;
7860}
7861
/** Opcode 0x0f 0xae mem/6.  XSAVEOPT - not implemented; this stub macro
 *  raises an invalid-opcode exception (\#UD). */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
7864
7865/**
7866 * @opmaps grp15
7867 * @opcode !11/7
7868 * @oppfx none
7869 * @opcpuid clfsh
7870 * @opgroup og_cachectl
7871 * @optest op1=1 ->
7872 */
7873FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
7874{
7875 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7876 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
7877 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7878
7879 IEM_MC_BEGIN(2, 0);
7880 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7881 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7884 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7885 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7886 IEM_MC_END();
7887 return VINF_SUCCESS;
7888}
7889
7890/**
7891 * @opmaps grp15
7892 * @opcode !11/7
7893 * @oppfx 0x66
7894 * @opcpuid clflushopt
7895 * @opgroup og_cachectl
7896 * @optest op1=1 ->
7897 */
7898FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
7899{
7900 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7901 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
7902 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
7903
7904 IEM_MC_BEGIN(2, 0);
7905 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7906 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7909 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7910 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
7911 IEM_MC_END();
7912 return VINF_SUCCESS;
7913}
7914
7915
/** Opcode 0x0f 0xae 11b/5.  LFENCE - load fence (SSE2).  On non-ARM64
 *  hosts a fallback fence is used when the host CPU lacks SSE2. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm); /* The reg/mem bits are part of the opcode selection; no operand. */
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7938
7939
/** Opcode 0x0f 0xae 11b/6.  MFENCE - full memory fence (SSE2).  On
 *  non-ARM64 hosts a fallback fence is used when the host lacks SSE2. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm); /* The reg/mem bits are part of the opcode selection; no operand. */
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7962
7963
/** Opcode 0x0f 0xae 11b/7.  SFENCE - store fence.
 *  NOTE(review): gated on fSse2 here like lfence/mfence, though sfence was
 *  architecturally introduced with SSE1 - confirm this is intentional. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm); /* The reg/mem bits are part of the opcode selection; no operand. */
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
#ifndef RT_ARCH_ARM64
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
#endif
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
#ifndef RT_ARCH_ARM64
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
#endif
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7986
7987
/** Opcode 0xf3 0x0f 0xae 11b/0.  RDFSBASE Ry - reads the FS segment base
 *  into a general register; 64-bit or 32-bit destination depending on the
 *  effective operand size.  Mode/CR4.FSGSBASE checks are presumably done
 *  by IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT. */
FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: only the low 32 bits of the base are read. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8015
8016
/** Opcode 0xf3 0x0f 0xae 11b/1.  RDGSBASE Ry - reads the GS segment base
 *  into a general register; same structure as rdfsbase, only the segment
 *  register differs. */
FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit operand size: only the low 32 bits of the base are read. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8044
8045
/** Opcode 0xf3 0x0f 0xae 11b/2.  WRFSBASE Ry - writes the FS segment base
 *  from a general register.  A 64-bit source is checked for canonicality
 *  (\#GP(0) otherwise); a 32-bit source is zero-extended into the base. */
FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst); /* base must be canonical */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit source is always canonical, so no check needed here. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8074
8075
/** Opcode 0xf3 0x0f 0xae 11b/3.  WRGSBASE Ry - writes the GS segment base
 *  from a general register; same structure as wrfsbase, only the segment
 *  register differs. */
FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
{
    IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint64_t, u64Dst, 0);
        IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst); /* base must be canonical */
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit source is always canonical, so no check needed here. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
        IEM_MC_ARG(uint32_t, u32Dst, 0);
        IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8104
8105
8106/**
8107 * Group 15 jump table for register variant.
8108 */
8109IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
8110{ /* pfx: none, 066h, 0f3h, 0f2h */
8111 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
8112 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
8113 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
8114 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
8115 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8116 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8117 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8118 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8119};
8120AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
8121
8122
8123/**
8124 * Group 15 jump table for memory variant.
8125 */
8126IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
8127{ /* pfx: none, 066h, 0f3h, 0f2h */
8128 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8129 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8130 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8131 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8132 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8133 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8134 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8135 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8136};
8137AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
8138
8139
/** Opcode 0x0f 0xae.  Group 15 dispatcher: selects the handler from the
 *  register or memory jump table using ModRM.reg and the active opcode
 *  prefix index (none/0x66/0xf3/0xf2). */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                             + pVCpu->iem.s.idxPrefix], bRm);
}
8153
8154
/** Opcode 0x0f 0xaf.  IMUL Gv,Ev - two-operand signed multiply;
 *  shares the generic rv,rm binary-operator decoder. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386(); /* Two-operand IMUL introduced with the 80386. */
    /* SF/ZF/AF/PF are architecturally undefined after IMUL. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
}
8163
8164
/** Opcode 0x0f 0xb0.  CMPXCHG Eb,Gb - compares AL with the destination;
 *  on match stores the source byte, otherwise loads the destination into
 *  AL.  The locked assembly variant is used when a LOCK prefix is present. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486(); /* CMPXCHG introduced with the 80486. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: AL is referenced directly, no copy needed. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: AL is worked on via a local copy (u8Al) and
           written back explicitly after the memory commit. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* write back possibly updated AL */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8223
8224/** Opcode 0x0f 0xb1. */
8225FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
8226{
8227 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
8228 IEMOP_HLP_MIN_486();
8229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8230
8231 if (IEM_IS_MODRM_REG_MODE(bRm))
8232 {
8233 IEMOP_HLP_DONE_DECODING();
8234 switch (pVCpu->iem.s.enmEffOpSize)
8235 {
8236 case IEMMODE_16BIT:
8237 IEM_MC_BEGIN(4, 0);
8238 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8239 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
8240 IEM_MC_ARG(uint16_t, u16Src, 2);
8241 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8242
8243 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8244 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8245 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
8246 IEM_MC_REF_EFLAGS(pEFlags);
8247 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8248 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
8249 else
8250 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
8251
8252 IEM_MC_ADVANCE_RIP();
8253 IEM_MC_END();
8254 return VINF_SUCCESS;
8255
8256 case IEMMODE_32BIT:
8257 IEM_MC_BEGIN(4, 0);
8258 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8259 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
8260 IEM_MC_ARG(uint32_t, u32Src, 2);
8261 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8262
8263 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8264 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8265 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
8266 IEM_MC_REF_EFLAGS(pEFlags);
8267 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8268 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
8269 else
8270 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
8271
8272 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
8273 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8274 IEM_MC_ADVANCE_RIP();
8275 IEM_MC_END();
8276 return VINF_SUCCESS;
8277
8278 case IEMMODE_64BIT:
8279 IEM_MC_BEGIN(4, 0);
8280 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8281 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
8282#ifdef RT_ARCH_X86
8283 IEM_MC_ARG(uint64_t *, pu64Src, 2);
8284#else
8285 IEM_MC_ARG(uint64_t, u64Src, 2);
8286#endif
8287 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8288
8289 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8290 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
8291 IEM_MC_REF_EFLAGS(pEFlags);
8292#ifdef RT_ARCH_X86
8293 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8294 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8295 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
8296 else
8297 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
8298#else
8299 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8300 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8301 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
8302 else
8303 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
8304#endif
8305
8306 IEM_MC_ADVANCE_RIP();
8307 IEM_MC_END();
8308 return VINF_SUCCESS;
8309
8310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8311 }
8312 }
8313 else
8314 {
8315 switch (pVCpu->iem.s.enmEffOpSize)
8316 {
8317 case IEMMODE_16BIT:
8318 IEM_MC_BEGIN(4, 3);
8319 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8320 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
8321 IEM_MC_ARG(uint16_t, u16Src, 2);
8322 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8324 IEM_MC_LOCAL(uint16_t, u16Ax);
8325
8326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8327 IEMOP_HLP_DONE_DECODING();
8328 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8329 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8330 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
8331 IEM_MC_FETCH_EFLAGS(EFlags);
8332 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
8333 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8334 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
8335 else
8336 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
8337
8338 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8339 IEM_MC_COMMIT_EFLAGS(EFlags);
8340 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
8341 IEM_MC_ADVANCE_RIP();
8342 IEM_MC_END();
8343 return VINF_SUCCESS;
8344
8345 case IEMMODE_32BIT:
8346 IEM_MC_BEGIN(4, 3);
8347 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8348 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
8349 IEM_MC_ARG(uint32_t, u32Src, 2);
8350 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8352 IEM_MC_LOCAL(uint32_t, u32Eax);
8353
8354 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8355 IEMOP_HLP_DONE_DECODING();
8356 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8357 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8358 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
8359 IEM_MC_FETCH_EFLAGS(EFlags);
8360 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
8361 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8362 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
8363 else
8364 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
8365
8366 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8367 IEM_MC_COMMIT_EFLAGS(EFlags);
8368 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
8369 IEM_MC_ADVANCE_RIP();
8370 IEM_MC_END();
8371 return VINF_SUCCESS;
8372
8373 case IEMMODE_64BIT:
8374 IEM_MC_BEGIN(4, 3);
8375 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8376 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
8377#ifdef RT_ARCH_X86
8378 IEM_MC_ARG(uint64_t *, pu64Src, 2);
8379#else
8380 IEM_MC_ARG(uint64_t, u64Src, 2);
8381#endif
8382 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8384 IEM_MC_LOCAL(uint64_t, u64Rax);
8385
8386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8387 IEMOP_HLP_DONE_DECODING();
8388 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8389 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
8390 IEM_MC_FETCH_EFLAGS(EFlags);
8391 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
8392#ifdef RT_ARCH_X86
8393 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8394 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8395 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
8396 else
8397 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
8398#else
8399 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8400 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8401 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
8402 else
8403 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
8404#endif
8405
8406 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8407 IEM_MC_COMMIT_EFLAGS(EFlags);
8408 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
8409 IEM_MC_ADVANCE_RIP();
8410 IEM_MC_END();
8411 return VINF_SUCCESS;
8412
8413 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8414 }
8415 }
8416}
8417
8418
/**
 * Common worker for the far-pointer load instructions (LSS, LFS, LGS):
 * fetches sel:offset from memory and defers the segment register + general
 * register update to iemCImpl_load_SReg_Greg.
 *
 * @param   iSegReg     The segment register being loaded (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
    uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Offset comes first in memory, the selector follows it at +2. */
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* 32-bit offset, selector at +4. */
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            /* 64-bit (or 32-bit, AMD) offset, selector at +8. */
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8480
8481
8482/** Opcode 0x0f 0xb2. */
8483FNIEMOP_DEF(iemOp_lss_Gv_Mp)
8484{
8485 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
8486 IEMOP_HLP_MIN_386();
8487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8488 if (IEM_IS_MODRM_REG_MODE(bRm))
8489 return IEMOP_RAISE_INVALID_OPCODE();
8490 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
8491}
8492
8493
/** Opcode 0x0f 0xb3 - BTR Ev,Gv: bit test and reset; dispatched via the common bit-op worker. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
8501
8502
8503/** Opcode 0x0f 0xb4. */
8504FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
8505{
8506 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
8507 IEMOP_HLP_MIN_386();
8508 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8509 if (IEM_IS_MODRM_REG_MODE(bRm))
8510 return IEMOP_RAISE_INVALID_OPCODE();
8511 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
8512}
8513
8514
8515/** Opcode 0x0f 0xb5. */
8516FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
8517{
8518 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
8519 IEMOP_HLP_MIN_386();
8520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8521 if (IEM_IS_MODRM_REG_MODE(bRm))
8522 return IEMOP_RAISE_INVALID_OPCODE();
8523 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
8524}
8525
8526
/** Opcode 0x0f 0xb6 - MOVZX Gv,Eb: zero extend a byte register/memory operand
 *  into a 16, 32 or 64-bit general register per the effective operand size. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8620
8621
/** Opcode 0x0f 0xb7 - MOVZX Gv,Ew: zero extend a word register/memory operand
 *  into a 32 or 64-bit general register.  16-bit operand size is treated like
 *  32-bit here (see the todo below). */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
8690
8691
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF/Itanium); stubbed to raise \#UD. */
FNIEMOP_UD_STUB(iemOp_jmpe);
8694
8695
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* #UD unless the guest CPU profile advertises POPCNT. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
        return iemOp_InvalidNeedRM(pVCpu);
#ifndef TST_IEM_CHECK_MC
# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    /* Assembly workers, selected below when the host CPU has POPCNT too. */
    static const IEMOPBINSIZES s_Native =
    { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
# endif
    /* Portable C fallback workers. */
    static const IEMOPBINSIZES s_Fallback =
    { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
#endif
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
}
8712
8713
/**
 * @opcode 0xb9
 * @opinvalid intel-modrm
 * @optest ->
 *
 * UD1 - architecturally guaranteed invalid opcode; always raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp10)
{
    /*
     * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit
     * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
     */
    Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
    IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
    return FNIEMOP_CALL(iemOp_InvalidNeedRM);
}
8729
8730
/** Opcode 0x0f 0xba - Group 8: BT/BTS/BTR/BTC Ev,Ib.
 *  The /r field selects the operation; /0../3 are invalid.  The imm8 bit
 *  offset is masked down to the operand width before use. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: case 1: case 2: case 3:
            /* Both AMD and Intel want full modr/m decoding and imm8. */
            return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid for register operands. */

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1); /* offset masked to 0..15 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1); /* offset masked to 0..31 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1); /* offset masked to 0..63 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads, so map read-only and forbid LOCK; the others write. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of the trailing imm8 */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of the trailing imm8 */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of the trailing imm8 */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8902
8903
/** Opcode 0x0f 0xbb - BTC Ev,Gv: bit test and complement; dispatched via the common bit-op worker. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
8911
8912
8913/**
8914 * Common worker for BSF and BSR instructions.
8915 *
8916 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
8917 * the destination register, which means that for 32-bit operations the high
8918 * bits must be left alone.
8919 *
8920 * @param pImpl Pointer to the instruction implementation (assembly).
8921 */
8922FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
8923{
8924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8925
8926 /*
8927 * If rm is denoting a register, no more instruction bytes.
8928 */
8929 if (IEM_IS_MODRM_REG_MODE(bRm))
8930 {
8931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8932 switch (pVCpu->iem.s.enmEffOpSize)
8933 {
8934 case IEMMODE_16BIT:
8935 IEM_MC_BEGIN(3, 0);
8936 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8937 IEM_MC_ARG(uint16_t, u16Src, 1);
8938 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8939
8940 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8941 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8942 IEM_MC_REF_EFLAGS(pEFlags);
8943 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8944
8945 IEM_MC_ADVANCE_RIP();
8946 IEM_MC_END();
8947 break;
8948
8949 case IEMMODE_32BIT:
8950 IEM_MC_BEGIN(3, 0);
8951 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8952 IEM_MC_ARG(uint32_t, u32Src, 1);
8953 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8954
8955 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8956 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8957 IEM_MC_REF_EFLAGS(pEFlags);
8958 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8959 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8960 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8961 IEM_MC_ENDIF();
8962 IEM_MC_ADVANCE_RIP();
8963 IEM_MC_END();
8964 break;
8965
8966 case IEMMODE_64BIT:
8967 IEM_MC_BEGIN(3, 0);
8968 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8969 IEM_MC_ARG(uint64_t, u64Src, 1);
8970 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8971
8972 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
8973 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
8974 IEM_MC_REF_EFLAGS(pEFlags);
8975 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8976
8977 IEM_MC_ADVANCE_RIP();
8978 IEM_MC_END();
8979 break;
8980 }
8981 }
8982 else
8983 {
8984 /*
8985 * We're accessing memory.
8986 */
8987 switch (pVCpu->iem.s.enmEffOpSize)
8988 {
8989 case IEMMODE_16BIT:
8990 IEM_MC_BEGIN(3, 1);
8991 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8992 IEM_MC_ARG(uint16_t, u16Src, 1);
8993 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8994 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8995
8996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8998 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8999 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9000 IEM_MC_REF_EFLAGS(pEFlags);
9001 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9002
9003 IEM_MC_ADVANCE_RIP();
9004 IEM_MC_END();
9005 break;
9006
9007 case IEMMODE_32BIT:
9008 IEM_MC_BEGIN(3, 1);
9009 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9010 IEM_MC_ARG(uint32_t, u32Src, 1);
9011 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9013
9014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9016 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9017 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9018 IEM_MC_REF_EFLAGS(pEFlags);
9019 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9020
9021 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9022 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9023 IEM_MC_ENDIF();
9024 IEM_MC_ADVANCE_RIP();
9025 IEM_MC_END();
9026 break;
9027
9028 case IEMMODE_64BIT:
9029 IEM_MC_BEGIN(3, 1);
9030 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9031 IEM_MC_ARG(uint64_t, u64Src, 1);
9032 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9033 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9034
9035 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9037 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9038 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9039 IEM_MC_REF_EFLAGS(pEFlags);
9040 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9041
9042 IEM_MC_ADVANCE_RIP();
9043 IEM_MC_END();
9044 break;
9045 }
9046 }
9047 return VINF_SUCCESS;
9048}
9049
9050
/** Opcode 0x0f 0xbc - BSF Gv,Ev: forward bit scan via the common bit-scan worker,
 *  with the EFLAGS-behavior table matching the configured target CPU. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
}
9059
9060
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
{
    /* Without BMI1 the F3 prefix is ignored and this byte decodes as BSF. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
    IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Worker tables: generic, AMD-flavoured and Intel-flavoured EFLAGS behavior. */
    static const IEMOPBINSIZES s_iemAImpl_tzcnt =
    { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
    { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
    { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
    /* Row selected by host BMI1 capability (see the _EX selector below), column by target CPU behavior. */
    static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
        { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF)
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}
9085
9086
/** Opcode 0x0f 0xbd - BSR Gv,Ev: reverse bit scan via the common bit-scan worker,
 *  with the EFLAGS-behavior table matching the configured target CPU. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
}
9095
9096
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
{
    /* Without BMI1/ABM the F3 prefix is ignored and this byte decodes as BSR. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
    IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);

#ifndef TST_IEM_CHECK_MC
    /* Worker tables: generic, AMD-flavoured and Intel-flavoured EFLAGS behavior. */
    static const IEMOPBINSIZES s_iemAImpl_lzcnt =
    { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
    { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
    static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
    { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
    /* Row selected by host BMI1 capability (see the _EX selector below), column by target CPU behavior. */
    static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
    {
        { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
        { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
    };
#endif
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF)
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
                          IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
}
9121
9122
9123
/** Opcode 0x0f 0xbe - MOVSX Gv,Eb: sign extend a byte register/memory operand
 *  into a 16, 32 or 64-bit general register per the effective operand size. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9217
9218
/** Opcode 0x0f 0xbf - MOVSX Gv,Ew: sign extend a word register/memory operand
 *  into a 32 or 64-bit general register.  16-bit operand size is treated like
 *  32-bit here (see the todo below). */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
9287
9288
9289/** Opcode 0x0f 0xc0. */
9290FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
9291{
9292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9293 IEMOP_HLP_MIN_486();
9294 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
9295
9296 /*
9297 * If rm is denoting a register, no more instruction bytes.
9298 */
9299 if (IEM_IS_MODRM_REG_MODE(bRm))
9300 {
9301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9302
9303 IEM_MC_BEGIN(3, 0);
9304 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9305 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
9306 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9307
9308 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9309 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9310 IEM_MC_REF_EFLAGS(pEFlags);
9311 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
9312
9313 IEM_MC_ADVANCE_RIP();
9314 IEM_MC_END();
9315 }
9316 else
9317 {
9318 /*
9319 * We're accessing memory.
9320 */
9321 IEM_MC_BEGIN(3, 3);
9322 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9323 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
9324 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9325 IEM_MC_LOCAL(uint8_t, u8RegCopy);
9326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9327
9328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9329 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9330 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9331 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
9332 IEM_MC_FETCH_EFLAGS(EFlags);
9333 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9334 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
9335 else
9336 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
9337
9338 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9339 IEM_MC_COMMIT_EFLAGS(EFlags);
9340 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
9341 IEM_MC_ADVANCE_RIP();
9342 IEM_MC_END();
9343 return VINF_SUCCESS;
9344 }
9345 return VINF_SUCCESS;
9346}
9347
9348
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv: exchange and add for 16/32/64-bit operands, dispatched on
       the effective operand size; memory forms honour the LOCK prefix. */
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit writes zero the upper halves of both 64-bit registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                /* Operate on a copy so the old memory value can be stored back
                   into the source register after commit. */
                IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9501
9502
/* 0x0f 0xc2 compare-with-predicate family: decoding placeholders only
   (FNIEMOP_STUB emits a stub handler; not yet implemented). */
/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
9511
9512
/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    /* MOVNTI My,Gy: non-temporal store of a 32/64-bit register to memory.
       Emulated as a plain store here; requires SSE2, else #UD. */
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Feature check after full decode so prefix handling stays uniform. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0x66 0x0f 0xc3 - invalid */
/* Opcode 0xf3 0x0f 0xc3 - invalid */
/* Opcode 0xf2 0x0f 0xc3 - invalid */

/* 0x0f 0xc4-0xc6: pinsrw/pextrw/shufps(d) - decoding placeholders only
   (FNIEMOP_STUB emits a stub handler; not yet implemented). */
/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
/* Opcode 0xf3 0x0f 0xc4 - invalid */
/* Opcode 0xf2 0x0f 0xc4 - invalid */

/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
/* Opcode 0xf3 0x0f 0xc5 - invalid */
/* Opcode 0xf2 0x0f 0xc5 - invalid */

/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
/* Opcode 0xf3 0x0f 0xc6 - invalid */
/* Opcode 0xf2 0x0f 0xc6 - invalid */
9590
9591
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B Mq: compare EDX:EAX with the 64-bit memory operand; on match
       store ECX:EBX, otherwise load the memory value into EDX:EAX. */
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    /* Map the 64-bit destination read/write for the compare-exchange. */
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX comparand and ECX:EBX replacement pairs. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* ZF clear means the compare failed: write back the fetched value. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9636
9637
/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    /* CMPXCHG16B Mdq: 128-bit compare-exchange of RDX:RAX vs memory, storing
       RCX:RBX on match.  Requires the CPUID CX16 feature, else #UD; the
       operand must be 16-byte aligned or #GP(0) is raised. */
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        /* Assemble the RDX:RAX comparand and RCX:RBX replacement pairs. */
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Prefer the native 128-bit assembly helper when the host supports it. */
# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
#  if defined(RT_ARCH_AMD64)
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
#  endif
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
#  if defined(RT_ARCH_AMD64)
        else
#  endif
# endif
# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
                     accesses and not all all atomic, which works fine on in UNI CPU guest
                     configuration (ignoring DMA). If guest SMP is active we have no choice
                     but to use a rendezvous callback here. Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }
# endif

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* ZF clear means the compare failed: write back the fetched value. */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
9717
9718FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
9719{
9720 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
9721 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
9722 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
9723}
9724
/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm); /* rdrand: stub raising #UD for now */
9727
/** Opcode 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
{
    /* VMPTRLD: load the current-VMCS pointer from the memory operand.
       Decode computes seg:offset here; the heavy lifting is in iemCImpl_vmptrld. */
    IEMOP_MNEMONIC(vmptrld, "vmptrld");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
    IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
#endif
9748
/** Opcode 0x66 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
{
    /* VMCLEAR: clear the VMCS referenced by the memory operand; the actual
       work is performed by iemCImpl_vmclear. */
    IEMOP_MNEMONIC(vmclear, "vmclear");
    IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
    IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
#endif
9769
/** Opcode 0xf3 0x0f 0xc7 !11/6. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
{
    /* VMXON: enter VMX operation using the VMXON region pointed to by the
       memory operand; implemented in iemCImpl_vmxon. */
    IEMOP_MNEMONIC(vmxon, "vmxon");
    IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
#endif
9789
/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
{
    /* VMPTRST: store the current-VMCS pointer to the memory operand;
       implemented in iemCImpl_vmptrst. */
    IEMOP_MNEMONIC(vmptrst, "vmptrst");
    IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
    IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
#else
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
#endif
9810
/** Opcode 0x0f 0xc7 11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm); /* rdseed: stub raising #UD for now */
9813
9814
9815/**
9816 * Group 9 jump table for register variant.
9817 */
9818IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
9819{ /* pfx: none, 066h, 0f3h, 0f2h */
9820 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9821 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
9822 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9823 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9824 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9825 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9826 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9827 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9828};
9829AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
9830
9831
9832/**
9833 * Group 9 jump table for memory variant.
9834 */
9835IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
9836{ /* pfx: none, 066h, 0f3h, 0f2h */
9837 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
9838 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
9839 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
9840 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
9841 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
9842 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
9843 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
9844 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
9845};
9846AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
9847
9848
9849/** Opcode 0x0f 0xc7. */
9850FNIEMOP_DEF(iemOp_Grp9)
9851{
9852 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
9853 if (IEM_IS_MODRM_REG_MODE(bRm))
9854 /* register, register */
9855 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9856 + pVCpu->iem.s.idxPrefix], bRm);
9857 /* memory, register */
9858 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
9859 + pVCpu->iem.s.idxPrefix], bRm);
9860}
9861
9862
9863/**
9864 * Common 'bswap register' helper.
9865 */
9866FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
9867{
9868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9869 switch (pVCpu->iem.s.enmEffOpSize)
9870 {
9871 case IEMMODE_16BIT:
9872 IEM_MC_BEGIN(1, 0);
9873 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9874 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
9875 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
9876 IEM_MC_ADVANCE_RIP();
9877 IEM_MC_END();
9878 return VINF_SUCCESS;
9879
9880 case IEMMODE_32BIT:
9881 IEM_MC_BEGIN(1, 0);
9882 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9883 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9884 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9885 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
9886 IEM_MC_ADVANCE_RIP();
9887 IEM_MC_END();
9888 return VINF_SUCCESS;
9889
9890 case IEMMODE_64BIT:
9891 IEM_MC_BEGIN(1, 0);
9892 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9893 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9894 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
9895 IEM_MC_ADVANCE_RIP();
9896 IEM_MC_END();
9897 return VINF_SUCCESS;
9898
9899 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9900 }
9901}
9902
9903
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    /* BSWAP rAX/r8: byte-swap via the common helper; REX.B selects r8. */
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
9914
9915
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* BSWAP rCX/r9: byte-swap via the common helper; REX.B selects r9. */
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
9923
9924
9925/** Opcode 0x0f 0xca. */
9926FNIEMOP_DEF(iemOp_bswap_rDX_r10)
9927{
9928 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
9929 IEMOP_HLP_MIN_486();
9930 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
9931}
9932
9933
9934/** Opcode 0x0f 0xcb. */
9935FNIEMOP_DEF(iemOp_bswap_rBX_r11)
9936{
9937 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
9938 IEMOP_HLP_MIN_486();
9939 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
9940}
9941
9942
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* BSWAP rSP/r12: byte-swap via the common helper; REX.B selects r12. */
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
9950
9951
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* BSWAP rBP/r13: byte-swap via the common helper; REX.B selects r13. */
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
9959
9960
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* BSWAP rSI/r14: byte-swap via the common helper; REX.B selects r14. */
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
9968
9969
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* BSWAP rDI/r15: byte-swap via the common helper; REX.B selects r15. */
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
9977
9978
/* Opcode 0x0f 0xd0 - invalid */
/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd (stub, not yet implemented) */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
/* Opcode 0xf3 0x0f 0xd0 - invalid */
/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps (stub, not yet implemented) */
FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
9985
9986/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
9987FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
9988{
9989 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
9990 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
9991}
9992
/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
{
    /* SSE2 logical shift right of packed words. */
    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
}
9999
10000/* Opcode 0xf3 0x0f 0xd1 - invalid */
10001/* Opcode 0xf2 0x0f 0xd1 - invalid */
10002
/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
{
    /* MMX logical shift right of packed doublewords. */
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
}
10009
10010
/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
{
    /* SSE2 logical shift right of packed doublewords. */
    IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
}
10017
10018
10019/* Opcode 0xf3 0x0f 0xd2 - invalid */
10020/* Opcode 0xf2 0x0f 0xd2 - invalid */
10021
/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
{
    /* MMX logical shift right of the quadword. */
    /* NOTE(review): the disassembler flags here (no DISOPTYPE_MMX, plus
       IEMOPHINT_IGNORES_OP_SIZES) differ from the sibling psrlw/psrld MMX
       forms - confirm whether this is intentional. */
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
}
10028
10029
/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
{
    /* SSE2 logical shift right of packed quadwords. */
    IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
}
10036
10037
10038/* Opcode 0xf3 0x0f 0xd3 - invalid */
10039/* Opcode 0xf2 0x0f 0xd3 - invalid */
10040
10041
/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
{
    /* MMX packed quadword add; uses the _Ex worker because the MMX paddq
       form requires the SSE2 feature bit. */
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}
10048
10049
/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
{
    /* SSE2 packed quadword add. */
    IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
}
10056
10057
10058/* Opcode 0xf3 0x0f 0xd4 - invalid */
10059/* Opcode 0xf2 0x0f 0xd4 - invalid */
10060
/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
{
    /* MMX packed word multiply, low result. */
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
}
10067
/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
{
    /* SSE2 packed word multiply, low result. */
    IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
}
10074
10075
10076/* Opcode 0xf3 0x0f 0xd5 - invalid */
10077/* Opcode 0xf2 0x0f 0xd5 - invalid */
10078
10079/* Opcode 0x0f 0xd6 - invalid */
10080
10081/**
10082 * @opcode 0xd6
10083 * @oppfx 0x66
10084 * @opcpuid sse2
10085 * @opgroup og_sse2_pcksclr_datamove
10086 * @opxcpttype none
10087 * @optest op1=-1 op2=2 -> op1=2
10088 * @optest op1=0 op2=-42 -> op1=-42
10089 */
10090FNIEMOP_DEF(iemOp_movq_Wq_Vq)
10091{
10092 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10093 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10094 if (IEM_IS_MODRM_REG_MODE(bRm))
10095 {
10096 /*
10097 * Register, register.
10098 */
10099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10100 IEM_MC_BEGIN(0, 2);
10101 IEM_MC_LOCAL(uint64_t, uSrc);
10102
10103 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10104 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
10105
10106 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10107 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
10108
10109 IEM_MC_ADVANCE_RIP();
10110 IEM_MC_END();
10111 }
10112 else
10113 {
10114 /*
10115 * Memory, register.
10116 */
10117 IEM_MC_BEGIN(0, 2);
10118 IEM_MC_LOCAL(uint64_t, uSrc);
10119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10120
10121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10123 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10124 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10125
10126 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10127 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10128
10129 IEM_MC_ADVANCE_RIP();
10130 IEM_MC_END();
10131 }
10132 return VINF_SUCCESS;
10133}
10134
10135
10136/**
10137 * @opcode 0xd6
10138 * @opcodesub 11 mr/reg
10139 * @oppfx f3
10140 * @opcpuid sse2
10141 * @opgroup og_sse2_simdint_datamove
10142 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
10143 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10144 */
10145FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
10146{
10147 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10148 if (IEM_IS_MODRM_REG_MODE(bRm))
10149 {
10150 /*
10151 * Register, register.
10152 */
10153 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10155 IEM_MC_BEGIN(0, 1);
10156 IEM_MC_LOCAL(uint64_t, uSrc);
10157
10158 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10159 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10160
10161 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
10162 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
10163 IEM_MC_FPU_TO_MMX_MODE();
10164
10165 IEM_MC_ADVANCE_RIP();
10166 IEM_MC_END();
10167 return VINF_SUCCESS;
10168 }
10169
10170 /**
10171 * @opdone
10172 * @opmnemonic udf30fd6mem
10173 * @opcode 0xd6
10174 * @opcodesub !11 mr/reg
10175 * @oppfx f3
10176 * @opunused intel-modrm
10177 * @opcpuid sse
10178 * @optest ->
10179 */
10180 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
10181}
10182
10183
10184/**
10185 * @opcode 0xd6
10186 * @opcodesub 11 mr/reg
10187 * @oppfx f2
10188 * @opcpuid sse2
10189 * @opgroup og_sse2_simdint_datamove
10190 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
10191 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10192 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
10193 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
10194 * @optest op1=-42 op2=0xfedcba9876543210
10195 * -> op1=0xfedcba9876543210 ftw=0xff
10196 */
FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* Writing an MMX register (and the FPU-to-MMX transition below)
           modifies x87 state, hence actualize-for-change. */
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        /* XMM source: low quadword only; the MMX destination uses the 3-bit
           register number (REX does not extend MMX register indexing). */
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udf20fd6mem
     * @opcode 0xd6
     * @opcodesub !11 mr/reg
     * @oppfx f2
     * @opunused intel-modrm
     * @opcpuid sse
     * @optest ->
     */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
}
10234
10235
10236/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
10237FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
10238{
10239 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10240 /* Docs says register only. */
10241 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
10242 {
10243 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
10244 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
10245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10246 IEM_MC_BEGIN(2, 0);
10247 IEM_MC_ARG(uint64_t *, puDst, 0);
10248 IEM_MC_ARG(uint64_t const *, puSrc, 1);
10249 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
10250 IEM_MC_PREPARE_FPU_USAGE();
10251 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
10252 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
10253 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
10254 IEM_MC_FPU_TO_MMX_MODE();
10255 IEM_MC_ADVANCE_RIP();
10256 IEM_MC_END();
10257 return VINF_SUCCESS;
10258 }
10259 return IEMOP_RAISE_INVALID_OPCODE();
10260}
10261
10262
/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
        IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        /* Both operands honor full ModRM (REX.R / REX.B) decoding here. */
        IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
10287
10288
10289/* Opcode 0xf3 0x0f 0xd7 - invalid */
10290/* Opcode 0xf2 0x0f 0xd7 - invalid */
10291
10292
/*
 * Docs note: the wrappers below all follow one pattern: IEMOP_MNEMONIC2 sets
 * up mnemonic logging/statistics, then decoding and execution are delegated
 * to a shared iemOpCommon* worker together with the iemAImpl_* helper for the
 * operand width (u64 = MMX register file, u128 = XMM).  Workers with MmxSse
 * in the name implement MMX forms that require SSE / AMD MMXEXT.
 */

/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
}


/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
}


/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
}


/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
}


/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq (SSE / MMXEXT form, see worker) */
FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
}


/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
}

/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_DEF(iemOp_pand_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
}


/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
FNIEMOP_DEF(iemOp_pand_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
}


/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
}


/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
}


/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
}


/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
}
10401
10402
10403/* Opcode 0xf3 0x0f 0xdd - invalid */
10404/* Opcode 0xf2 0x0f 0xdd - invalid */
10405
/*
 * Docs note: MnemonicStats + dispatch to a common worker with the width
 * specific iemAImpl_* helper (u64 = MMX, u128 = XMM); *MmxSse* workers gate
 * the MMX form on SSE / AMD MMXEXT.
 */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq (SSE / MMXEXT form, see worker) */
FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
}


/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
}

/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */


/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
}


/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
}


/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq (SSE / MMXEXT form, see worker) */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
}


/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
}
10458
10459
10460/* Opcode 0xf3 0x0f 0xe0 - invalid */
10461/* Opcode 0xf2 0x0f 0xe0 - invalid */
10462
/*
 * Docs note: mnemonic stats + dispatch to a common worker with the width
 * specific iemAImpl_* helper (u64 = MMX, u128 = XMM).
 */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
}


/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
}


/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
}


/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
}


/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq (SSE / MMXEXT form, see worker) */
FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
}


/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
}
10515
10516
10517/* Opcode 0xf3 0x0f 0xe3 - invalid */
10518/* Opcode 0xf2 0x0f 0xe3 - invalid */
10519
/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq (SSE / MMXEXT form, see worker) */
FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
}


/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
}


/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
}


/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
}
10553
10554
10555/* Opcode 0xf3 0x0f 0xe5 - invalid */
10556/* Opcode 0xf2 0x0f 0xe5 - invalid */
10557
/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd (decode stub, not implemented yet) */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd (decode stub, not implemented yet) */
FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd (decode stub, not implemented yet) */
FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
10565
10566
10567/**
10568 * @opcode 0xe7
10569 * @opcodesub !11 mr/reg
10570 * @oppfx none
10571 * @opcpuid sse
10572 * @opgroup og_sse1_cachect
10573 * @opxcpttype none
10574 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
10575 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10576 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        /* Although the MMX register is only read, IEM_MC_FPU_TO_MMX_MODE
           below modifies x87 state, hence actualize-for-change. */
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

        /* Done as an ordinary 64-bit store; the non-temporal cache hint has
           no functional effect that needs emulating. */
        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    /**
     * @opdone
     * @opmnemonic ud0fe7reg
     * @opcode 0xe7
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
10613
10614/**
10615 * @opcode 0xe7
10616 * @opcodesub !11 mr/reg
10617 * @oppfx 0x66
10618 * @opcpuid sse2
10619 * @opgroup og_sse2_cachect
10620 * @opxcpttype 1
10621 * @optest op1=-1 op2=2 -> op1=2
10622 * @optest op1=0 op2=-42 -> op1=-42
10623 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* XMM source is only read, so read-only state actualization. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Alignment-checked 16-byte store (unlike movntq above). */
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic ud660fe7reg
     * @opcode 0xe7
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}
10660
10661/* Opcode 0xf3 0x0f 0xe7 - invalid */
10662/* Opcode 0xf2 0x0f 0xe7 - invalid */
10663
10664
/*
 * Docs note: mnemonic stats + dispatch to a common worker with the width
 * specific iemAImpl_* helper (u64 = MMX, u128 = XMM); *MmxSse* workers gate
 * the MMX form on SSE / AMD MMXEXT.
 */

/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
}


/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
}


/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
}


/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */


/** Opcode 0x0f 0xea - pminsw Pq, Qq (SSE / MMXEXT form, see worker) */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
}


/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */


/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}
10738
10739
10740/* Opcode 0xf3 0x0f 0xeb - invalid */
10741/* Opcode 0xf2 0x0f 0xeb - invalid */
10742
/*
 * Docs note: mnemonic stats + dispatch to a common worker with the width
 * specific iemAImpl_* helper (u64 = MMX, u128 = XMM); *MmxSse* workers gate
 * the MMX form on SSE / AMD MMXEXT.
 */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
}


/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */


/** Opcode 0x0f 0xee - pmaxsw Pq, Qq (SSE / MMXEXT form, see worker) */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}


/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}
10816
10817
10818/* Opcode 0xf3 0x0f 0xef - invalid */
10819/* Opcode 0xf2 0x0f 0xef - invalid */
10820
/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx (decode stub, not implemented yet) */
FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);


/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}
10877
10878/* Opcode 0xf2 0x0f 0xf3 - invalid */
10879
10880/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
10881FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
10882{
10883 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10884 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
10885}
10886
10887
/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
}


/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}

/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq (SSE / MMXEXT form, see worker) */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}
10929
10930
10931/* Opcode 0xf2 0x0f 0xf6 - invalid */
10932
/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq (decode stub, not implemented yet) */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq (decode stub, not implemented yet) */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */
10938
10939
/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}


/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* The MMX form of psubq was introduced with SSE2, hence the fSse2 gate
       via the _Ex worker. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}
11068
11069
11070/* Opcode 0xf2 0x0f 0xfe - invalid */
11071
11072
/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        /* On Intel CPUs UD0 consumes a ModR/M byte (and any SIB/displacement
           bytes), so decode them before raising the invalid-opcode
           exception; other vendors fault on the bare opcode. */
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* Calculating the effective address consumes the remaining
               addressing bytes; decoding errors are propagated. */
            RTGCPTR GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
11093
11094
11095
/**
 * Two byte opcode map, first byte 0x0f.
 *
 * Each opcode byte has four entries, selected by the mandatory prefix in
 * effect: none, 0x66, 0xf3 and 0xf2 (in that column order), giving
 * 256 x 4 = 1024 entries (checked by the AssertCompile below).  The
 * IEMOP_X4 macro expands to four identical entries for opcodes that
 * ignore the mandatory prefixes.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /*          no prefix,                  066h prefix                 f3h prefix,                 f2h prefix */
    /* 0x00 */  IEMOP_X4(iemOp_Grp6),
    /* 0x01 */  IEMOP_X4(iemOp_Grp7),
    /* 0x02 */  IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */  IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */  IEMOP_X4(iemOp_Invalid),
    /* 0x05 */  IEMOP_X4(iemOp_syscall),
    /* 0x06 */  IEMOP_X4(iemOp_clts),
    /* 0x07 */  IEMOP_X4(iemOp_sysret),
    /* 0x08 */  IEMOP_X4(iemOp_invd),
    /* 0x09 */  IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */  IEMOP_X4(iemOp_Invalid),
    /* 0x0b */  IEMOP_X4(iemOp_ud2),
    /* 0x0c */  IEMOP_X4(iemOp_Invalid),
    /* 0x0d */  IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */  IEMOP_X4(iemOp_femms),
    /* 0x0f */  IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */  iemOp_movups_Vps_Wps,       iemOp_movupd_Vpd_Wpd,       iemOp_movss_Vss_Wss,        iemOp_movsd_Vsd_Wsd,
    /* 0x11 */  iemOp_movups_Wps_Vps,       iemOp_movupd_Wpd_Vpd,       iemOp_movss_Wss_Vss,        iemOp_movsd_Wsd_Vsd,
    /* 0x12 */  iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq,        iemOp_movsldup_Vdq_Wdq,     iemOp_movddup_Vdq_Wdq,
    /* 0x13 */  iemOp_movlps_Mq_Vq,         iemOp_movlpd_Mq_Vq,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x14 */  iemOp_unpcklps_Vx_Wx,       iemOp_unpcklpd_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x15 */  iemOp_unpckhps_Vx_Wx,       iemOp_unpckhpd_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x16 */  iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq,   iemOp_InvalidNeedRM,
    /* 0x17 */  iemOp_movhps_Mq_Vq,         iemOp_movhpd_Mq_Vq,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x18 */  IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */  IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */  IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */  iemOp_mov_Rd_Cd,            iemOp_mov_Rd_Cd,            iemOp_mov_Rd_Cd,            iemOp_mov_Rd_Cd,
    /* 0x21 */  iemOp_mov_Rd_Dd,            iemOp_mov_Rd_Dd,            iemOp_mov_Rd_Dd,            iemOp_mov_Rd_Dd,
    /* 0x22 */  iemOp_mov_Cd_Rd,            iemOp_mov_Cd_Rd,            iemOp_mov_Cd_Rd,            iemOp_mov_Cd_Rd,
    /* 0x23 */  iemOp_mov_Dd_Rd,            iemOp_mov_Dd_Rd,            iemOp_mov_Dd_Rd,            iemOp_mov_Dd_Rd,
    /* 0x24 */  iemOp_mov_Rd_Td,            iemOp_mov_Rd_Td,            iemOp_mov_Rd_Td,            iemOp_mov_Rd_Td,
    /* 0x25 */  iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,
    /* 0x26 */  iemOp_mov_Td_Rd,            iemOp_mov_Td_Rd,            iemOp_mov_Td_Rd,            iemOp_mov_Td_Rd,
    /* 0x27 */  iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,              iemOp_Invalid,
    /* 0x28 */  iemOp_movaps_Vps_Wps,       iemOp_movapd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x29 */  iemOp_movaps_Wps_Vps,       iemOp_movapd_Wpd_Vpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2a */  iemOp_cvtpi2ps_Vps_Qpi,     iemOp_cvtpi2pd_Vpd_Qpi,     iemOp_cvtsi2ss_Vss_Ey,      iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */  iemOp_movntps_Mps_Vps,      iemOp_movntpd_Mpd_Vpd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2c */  iemOp_cvttps2pi_Ppi_Wps,    iemOp_cvttpd2pi_Ppi_Wpd,    iemOp_cvttss2si_Gy_Wss,     iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */  iemOp_cvtps2pi_Ppi_Wps,     iemOp_cvtpd2pi_Qpi_Wpd,     iemOp_cvtss2si_Gy_Wss,      iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */  iemOp_ucomiss_Vss_Wss,      iemOp_ucomisd_Vsd_Wsd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2f */  iemOp_comiss_Vss_Wss,       iemOp_comisd_Vsd_Wsd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x30 */  IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */  IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */  IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */  IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */  IEMOP_X4(iemOp_sysenter),
    /* 0x35 */  IEMOP_X4(iemOp_sysexit),
    /* 0x36 */  IEMOP_X4(iemOp_Invalid),
    /* 0x37 */  IEMOP_X4(iemOp_getsec),
    /* 0x38 */  IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */  IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */  IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */  IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */  IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */  IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */  IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */  IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */  IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */  IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */  IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */  IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */  IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */  IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */  IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */  IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */  IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */  IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */  IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */  iemOp_movmskps_Gy_Ups,      iemOp_movmskpd_Gy_Upd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x51 */  iemOp_sqrtps_Vps_Wps,       iemOp_sqrtpd_Vpd_Wpd,       iemOp_sqrtss_Vss_Wss,       iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */  iemOp_rsqrtps_Vps_Wps,      iemOp_InvalidNeedRM,        iemOp_rsqrtss_Vss_Wss,      iemOp_InvalidNeedRM,
    /* 0x53 */  iemOp_rcpps_Vps_Wps,        iemOp_InvalidNeedRM,        iemOp_rcpss_Vss_Wss,        iemOp_InvalidNeedRM,
    /* 0x54 */  iemOp_andps_Vps_Wps,        iemOp_andpd_Vpd_Wpd,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x55 */  iemOp_andnps_Vps_Wps,       iemOp_andnpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x56 */  iemOp_orps_Vps_Wps,         iemOp_orpd_Vpd_Wpd,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x57 */  iemOp_xorps_Vps_Wps,        iemOp_xorpd_Vpd_Wpd,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x58 */  iemOp_addps_Vps_Wps,        iemOp_addpd_Vpd_Wpd,        iemOp_addss_Vss_Wss,        iemOp_addsd_Vsd_Wsd,
    /* 0x59 */  iemOp_mulps_Vps_Wps,        iemOp_mulpd_Vpd_Wpd,        iemOp_mulss_Vss_Wss,        iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */  iemOp_cvtps2pd_Vpd_Wps,     iemOp_cvtpd2ps_Vps_Wpd,     iemOp_cvtss2sd_Vsd_Wss,     iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */  iemOp_cvtdq2ps_Vps_Wdq,     iemOp_cvtps2dq_Vdq_Wps,     iemOp_cvttps2dq_Vdq_Wps,    iemOp_InvalidNeedRM,
    /* 0x5c */  iemOp_subps_Vps_Wps,        iemOp_subpd_Vpd_Wpd,        iemOp_subss_Vss_Wss,        iemOp_subsd_Vsd_Wsd,
    /* 0x5d */  iemOp_minps_Vps_Wps,        iemOp_minpd_Vpd_Wpd,        iemOp_minss_Vss_Wss,        iemOp_minsd_Vsd_Wsd,
    /* 0x5e */  iemOp_divps_Vps_Wps,        iemOp_divpd_Vpd_Wpd,        iemOp_divss_Vss_Wss,        iemOp_divsd_Vsd_Wsd,
    /* 0x5f */  iemOp_maxps_Vps_Wps,        iemOp_maxpd_Vpd_Wpd,        iemOp_maxss_Vss_Wss,        iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */  iemOp_punpcklbw_Pq_Qd,      iemOp_punpcklbw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x61 */  iemOp_punpcklwd_Pq_Qd,      iemOp_punpcklwd_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x62 */  iemOp_punpckldq_Pq_Qd,      iemOp_punpckldq_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x63 */  iemOp_packsswb_Pq_Qq,       iemOp_packsswb_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x64 */  iemOp_pcmpgtb_Pq_Qq,        iemOp_pcmpgtb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x65 */  iemOp_pcmpgtw_Pq_Qq,        iemOp_pcmpgtw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x66 */  iemOp_pcmpgtd_Pq_Qq,        iemOp_pcmpgtd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x67 */  iemOp_packuswb_Pq_Qq,       iemOp_packuswb_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x68 */  iemOp_punpckhbw_Pq_Qq,      iemOp_punpckhbw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x69 */  iemOp_punpckhwd_Pq_Qq,      iemOp_punpckhwd_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6a */  iemOp_punpckhdq_Pq_Qq,      iemOp_punpckhdq_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6b */  iemOp_packssdw_Pq_Qd,       iemOp_packssdw_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6c */  iemOp_InvalidNeedRM,        iemOp_punpcklqdq_Vx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6d */  iemOp_InvalidNeedRM,        iemOp_punpckhqdq_Vx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6e */  iemOp_movd_q_Pd_Ey,         iemOp_movd_q_Vy_Ey,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6f */  iemOp_movq_Pq_Qq,           iemOp_movdqa_Vdq_Wdq,       iemOp_movdqu_Vdq_Wdq,       iemOp_InvalidNeedRM,

    /* 0x70 */  iemOp_pshufw_Pq_Qq_Ib,      iemOp_pshufd_Vx_Wx_Ib,      iemOp_pshufhw_Vx_Wx_Ib,     iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */  IEMOP_X4(iemOp_Grp12),
    /* 0x72 */  IEMOP_X4(iemOp_Grp13),
    /* 0x73 */  IEMOP_X4(iemOp_Grp14),
    /* 0x74 */  iemOp_pcmpeqb_Pq_Qq,        iemOp_pcmpeqb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x75 */  iemOp_pcmpeqw_Pq_Qq,        iemOp_pcmpeqw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x76 */  iemOp_pcmpeqd_Pq_Qq,        iemOp_pcmpeqd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x77 */  iemOp_emms,                 iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x78 */  iemOp_vmread_Ey_Gy,         iemOp_AmdGrp17,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x79 */  iemOp_vmwrite_Gy_Ey,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7a */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7b */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x7c */  iemOp_InvalidNeedRM,        iemOp_haddpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_haddps_Vps_Wps,
    /* 0x7d */  iemOp_InvalidNeedRM,        iemOp_hsubpd_Vpd_Wpd,       iemOp_InvalidNeedRM,        iemOp_hsubps_Vps_Wps,
    /* 0x7e */  iemOp_movd_q_Ey_Pd,         iemOp_movd_q_Ey_Vy,         iemOp_movq_Vq_Wq,           iemOp_InvalidNeedRM,
    /* 0x7f */  iemOp_movq_Qq_Pq,           iemOp_movdqa_Wx_Vx,         iemOp_movdqu_Wx_Vx,         iemOp_InvalidNeedRM,

    /* 0x80 */  IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */  IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */  IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */  IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */  IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */  IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */  IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */  IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */  IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */  IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */  IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */  IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */  IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */  IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */  IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */  IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */  IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */  IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */  IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */  IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */  IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */  IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */  IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */  IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */  IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */  IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */  IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */  IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */  IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */  IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */  IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */  IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */  IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */  IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */  IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */  IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */  IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */  IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */  IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */  IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */  IEMOP_X4(iemOp_rsm),
    /* 0xab */  IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */  IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */  IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */  IEMOP_X4(iemOp_Grp15),
    /* 0xaf */  IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */  IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */  IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */  IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */  IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */  IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */  IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */  IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */  IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */  iemOp_jmpe,                 iemOp_InvalidNeedRM,        iemOp_popcnt_Gv_Ev,         iemOp_InvalidNeedRM,
    /* 0xb9 */  IEMOP_X4(iemOp_Grp10),
    /* 0xba */  IEMOP_X4(iemOp_Grp8),
    /* 0xbb */  IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */  iemOp_bsf_Gv_Ev,            iemOp_bsf_Gv_Ev,            iemOp_tzcnt_Gv_Ev,          iemOp_bsf_Gv_Ev,
    /* 0xbd */  iemOp_bsr_Gv_Ev,            iemOp_bsr_Gv_Ev,            iemOp_lzcnt_Gv_Ev,          iemOp_bsr_Gv_Ev,
    /* 0xbe */  IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */  IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */  IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */  IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */  iemOp_cmpps_Vps_Wps_Ib,     iemOp_cmppd_Vpd_Wpd_Ib,     iemOp_cmpss_Vss_Wss_Ib,     iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */  iemOp_movnti_My_Gy,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xc4 */  iemOp_pinsrw_Pq_RyMw_Ib,    iemOp_pinsrw_Vdq_RyMw_Ib,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc5 */  iemOp_pextrw_Gd_Nq_Ib,      iemOp_pextrw_Gd_Udq_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc6 */  iemOp_shufps_Vps_Wps_Ib,    iemOp_shufpd_Vpd_Wpd_Ib,    iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc7 */  IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */  IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */  IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */  IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */  IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */  IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */  IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */  IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */  IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */  iemOp_InvalidNeedRM,        iemOp_addsubpd_Vpd_Wpd,     iemOp_InvalidNeedRM,        iemOp_addsubps_Vps_Wps,
    /* 0xd1 */  iemOp_psrlw_Pq_Qq,          iemOp_psrlw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd2 */  iemOp_psrld_Pq_Qq,          iemOp_psrld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd3 */  iemOp_psrlq_Pq_Qq,          iemOp_psrlq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd4 */  iemOp_paddq_Pq_Qq,          iemOp_paddq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd5 */  iemOp_pmullw_Pq_Qq,         iemOp_pmullw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd6 */  iemOp_InvalidNeedRM,        iemOp_movq_Wq_Vq,           iemOp_movq2dq_Vdq_Nq,       iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */  iemOp_pmovmskb_Gd_Nq,       iemOp_pmovmskb_Gd_Ux,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd8 */  iemOp_psubusb_Pq_Qq,        iemOp_psubusb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd9 */  iemOp_psubusw_Pq_Qq,        iemOp_psubusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xda */  iemOp_pminub_Pq_Qq,         iemOp_pminub_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdb */  iemOp_pand_Pq_Qq,           iemOp_pand_Vx_Wx,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdc */  iemOp_paddusb_Pq_Qq,        iemOp_paddusb_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdd */  iemOp_paddusw_Pq_Qq,        iemOp_paddusw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xde */  iemOp_pmaxub_Pq_Qq,         iemOp_pmaxub_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdf */  iemOp_pandn_Pq_Qq,          iemOp_pandn_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xe0 */  iemOp_pavgb_Pq_Qq,          iemOp_pavgb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe1 */  iemOp_psraw_Pq_Qq,          iemOp_psraw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe2 */  iemOp_psrad_Pq_Qq,          iemOp_psrad_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe3 */  iemOp_pavgw_Pq_Qq,          iemOp_pavgw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe4 */  iemOp_pmulhuw_Pq_Qq,        iemOp_pmulhuw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe5 */  iemOp_pmulhw_Pq_Qq,         iemOp_pmulhw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe6 */  iemOp_InvalidNeedRM,        iemOp_cvttpd2dq_Vx_Wpd,     iemOp_cvtdq2pd_Vx_Wpd,      iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */  iemOp_movntq_Mq_Pq,         iemOp_movntdq_Mdq_Vdq,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe8 */  iemOp_psubsb_Pq_Qq,         iemOp_psubsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe9 */  iemOp_psubsw_Pq_Qq,         iemOp_psubsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xea */  iemOp_pminsw_Pq_Qq,         iemOp_pminsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xeb */  iemOp_por_Pq_Qq,            iemOp_por_Vx_Wx,            iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xec */  iemOp_paddsb_Pq_Qq,         iemOp_paddsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xed */  iemOp_paddsw_Pq_Qq,         iemOp_paddsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xee */  iemOp_pmaxsw_Pq_Qq,         iemOp_pmaxsw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xef */  iemOp_pxor_Pq_Qq,           iemOp_pxor_Vx_Wx,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xf0 */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_lddqu_Vx_Mx,
    /* 0xf1 */  iemOp_psllw_Pq_Qq,          iemOp_psllw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf2 */  iemOp_pslld_Pq_Qq,          iemOp_pslld_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf3 */  iemOp_psllq_Pq_Qq,          iemOp_psllq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf4 */  iemOp_pmuludq_Pq_Qq,        iemOp_pmuludq_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf5 */  iemOp_pmaddwd_Pq_Qq,        iemOp_pmaddwd_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf6 */  iemOp_psadbw_Pq_Qq,         iemOp_psadbw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf7 */  iemOp_maskmovq_Pq_Nq,       iemOp_maskmovdqu_Vdq_Udq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf8 */  iemOp_psubb_Pq_Qq,          iemOp_psubb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf9 */  iemOp_psubw_Pq_Qq,          iemOp_psubw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfa */  iemOp_psubd_Pq_Qq,          iemOp_psubd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfb */  iemOp_psubq_Pq_Qq,          iemOp_psubq_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfc */  iemOp_paddb_Pq_Qq,          iemOp_paddb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfd */  iemOp_paddw_Pq_Qq,          iemOp_paddw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfe */  iemOp_paddd_Pq_Qq,          iemOp_paddd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xff */  IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
11379
11380/** @} */
11381
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette