VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h@96343

Last change on this file since 96343 was 96343, checked in by vboxsync, 3 years ago

VMM/IEM: Implement subss/subsd instructions + bugfixes for other instructions, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 389.1 KB
/* $Id: IEMAllInstructionsTwoByte0f.cpp.h 96343 2022-08-19 16:44:15Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

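/* Editor's note: the handler below is an illustrative sketch, not part of this
   revision.  It shows the typical way a two-byte opcode decoder binds to the
   common worker above; the handler name and the iemAImpl_pxor_u64 helper
   binding are assumptions for illustration. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq_example)
{
    IEMOP_MNEMONIC(pxor, "pxor Pq,Qq");
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}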

/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

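/* Editor's note: illustrative sketch, not part of this revision.  The
   SSE-extended MMX forms (e.g. pavgb mm,mm/mem64) are the intended clients of
   the worker above; the handler name and the iemAImpl_pavgb_u64 binding are
   assumptions for illustration. */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq_example)
{
    IEMOP_MNEMONIC(pavgb, "pavgb Pq,Qq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pavgb_u64);
}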

/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
 */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_EX(fSupported);
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

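/* Editor's note: illustrative sketch, not part of this revision.  The extra
   fSupported argument lets one worker serve MMX instructions gated on
   different CPUID features; the SSE2-only MMX form of paddq is a plausible
   client.  Handler name and iemAImpl_paddq_u64 binding are assumptions. */
FNIEMOP_DEF(iemOp_paddq_Pq_Qq_example)
{
    IEMOP_MNEMONIC(paddq, "paddq Pq,Qq");
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64,
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
}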

/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

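/* Editor's note: illustrative sketch, not part of this revision, showing how a
   128-bit SSE2 integer instruction forwards to the worker above; the
   iemAImpl_pcmpeqb_u128 binding is an assumption for illustration. */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx_example)
{
    IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb Vx,Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}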

/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, FNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

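/* Editor's note: illustrative sketch, not part of this revision.  Low-half
   interleaves such as punpcklbw are the typical clients here, which is why the
   memory form fetches only 32 bits; the iemAImpl_punpcklbw_u64 binding is an
   assumption for illustration. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd_example)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq,Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
}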

/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which for SSE means a
 * 128-bit aligned access where it may read the full 128 bits or only the
 * lower 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

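/* Editor's note: illustrative sketch, not part of this revision.  unpcklps is
   a plausible client of the SSE low-half worker above; the
   iemAImpl_unpcklps_u128 binding is an assumption for illustration. */
FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx_example)
{
    IEMOP_MNEMONIC(unpcklps, "unpcklps Vx,Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
}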

/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which for SSE2 means a
 * 128-bit aligned access where it may read the full 128 bits or only the
 * lower 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

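/* Editor's note: illustrative sketch, not part of this revision.  High-half
   interleaves such as punpckhbw are the typical clients; note that the memory
   form above still performs a full 64-bit read.  The iemAImpl_punpckhbw_u64
   binding is an assumption for illustration. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq_example)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq,Qq");
    return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
}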

/**
 * Common worker for SSE instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access where it may read the full 128 bits or only the
 * upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

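/* Editor's note: illustrative sketch, not part of this revision.  Packed
   single-precision arithmetic such as addps goes through this worker, which
   also stores the MXCSR result and may raise SIMD FP exceptions; the
   iemAImpl_addps_u128 binding is an assumption for illustration. */
FNIEMOP_DEF(iemOp_addps_Vps_Wps_example)
{
    IEMOP_MNEMONIC(addps, "addps Vps,Wps");
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
}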

/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

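/* Editor's note: illustrative sketch, not part of this revision.  The scalar
   single-precision worker above is what an instruction like subss (added in
   this changeset according to the commit message) would use; the exact
   iemAImpl_subss_u128_r32 binding shown is an assumption. */
FNIEMOP_DEF(iemOp_subss_Vss_Wss_example)
{
    IEMOP_MNEMONIC(subss, "subss Vss,Wss");
    return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
}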

/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

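/* Editor's note: illustrative sketch, not part of this revision.  The scalar
   double-precision worker above is what an instruction like subsd (added in
   this changeset according to the commit message) would use; the exact
   iemAImpl_subsd_u128_r64 binding shown is an assumption. */
FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd_example)
{
    IEMOP_MNEMONIC(subsd, "subsd Vsd,Wsd");
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
}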

/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE2 means a
 * 128-bit aligned access where it may read the full 128 bits or only the
 * upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();


    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Common worker for opcodes 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
        IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}


/** Opcode 0x0f 0x01 /7. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    IEMOP_MNEMONIC(rdtscp, "rdtscp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtscp);
}


/**
 * Group 7 jump table, memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
{
    iemOp_Grp7_sgdt,
    iemOp_Grp7_sidt,
    iemOp_Grp7_lgdt,
    iemOp_Grp7_lidt,
    iemOp_Grp7_smsw,
    iemOp_InvalidWithRM,
    iemOp_Grp7_lmsw,
    iemOp_Grp7_invlpg
};


/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
        return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            switch (IEM_GET_MODRM_RM_8(bRm))
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

/** Common worker for opcodes 0x0f 0x02 (lar) and 0x0f 0x03 (lsl). */
1724FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1725{
1726 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1727 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1728
1729 if (IEM_IS_MODRM_REG_MODE(bRm))
1730 {
1731 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1732 switch (pVCpu->iem.s.enmEffOpSize)
1733 {
1734 case IEMMODE_16BIT:
1735 {
1736 IEM_MC_BEGIN(3, 0);
1737 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1738 IEM_MC_ARG(uint16_t, u16Sel, 1);
1739 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1740
1741 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1742 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1743 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1744
1745 IEM_MC_END();
1746 return VINF_SUCCESS;
1747 }
1748
1749 case IEMMODE_32BIT:
1750 case IEMMODE_64BIT:
1751 {
1752 IEM_MC_BEGIN(3, 0);
1753 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1754 IEM_MC_ARG(uint16_t, u16Sel, 1);
1755 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1756
1757 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1758 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1759 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1760
1761 IEM_MC_END();
1762 return VINF_SUCCESS;
1763 }
1764
1765 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1766 }
1767 }
1768 else
1769 {
1770 switch (pVCpu->iem.s.enmEffOpSize)
1771 {
1772 case IEMMODE_16BIT:
1773 {
1774 IEM_MC_BEGIN(3, 1);
1775 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1776 IEM_MC_ARG(uint16_t, u16Sel, 1);
1777 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1779
1780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1781 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1782
1783 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1784 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1785 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1786
1787 IEM_MC_END();
1788 return VINF_SUCCESS;
1789 }
1790
1791 case IEMMODE_32BIT:
1792 case IEMMODE_64BIT:
1793 {
1794 IEM_MC_BEGIN(3, 1);
1795 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1796 IEM_MC_ARG(uint16_t, u16Sel, 1);
1797 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1798 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1799
1800 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1801 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1802/** @todo testcase: make sure it's a 16-bit read. */
1803
1804 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1805 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1806 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1807
1808 IEM_MC_END();
1809 return VINF_SUCCESS;
1810 }
1811
1812 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1813 }
1814 }
1815}
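
/*
 * Semantics sketch (illustrative only, the real work is done by the
 * iemCImpl_LarLsl_u16/u64 implementations invoked above):
 *
 *      lar eax, cx     ; eax = access rights of the descriptor cx selects,
 *                      ;       ZF=1; ZF=0 if the selector is not accessible.
 *      lsl eax, cx     ; eax = segment limit for that descriptor, same ZF
 *                      ;       convention.
 *
 * Sharing one decoder worker is possible because only the fIsLar flag
 * differs between the two opcodes.
 */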
1816
1817
1818
1819/** Opcode 0x0f 0x02. */
1820FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1821{
1822 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1823 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1824}
1825
1826
1827/** Opcode 0x0f 0x03. */
1828FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1829{
1830 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1831 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1832}
1833
1834
1835/** Opcode 0x0f 0x05. */
1836FNIEMOP_DEF(iemOp_syscall)
1837{
1838 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1840 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1841}
1842
1843
1844/** Opcode 0x0f 0x06. */
1845FNIEMOP_DEF(iemOp_clts)
1846{
1847 IEMOP_MNEMONIC(clts, "clts");
1848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1849 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1850}
1851
1852
1853/** Opcode 0x0f 0x07. */
1854FNIEMOP_DEF(iemOp_sysret)
1855{
1856 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1858 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1859}
1860
1861
1862/** Opcode 0x0f 0x08. */
1863FNIEMOP_DEF(iemOp_invd)
1864{
1865 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1866 IEMOP_HLP_MIN_486();
1867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1868 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invd);
1869}
1870
1871
1872/** Opcode 0x0f 0x09. */
1873FNIEMOP_DEF(iemOp_wbinvd)
1874{
1875 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1876 IEMOP_HLP_MIN_486();
1877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1878 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wbinvd);
1879}
1880
1881
1882/** Opcode 0x0f 0x0b. */
1883FNIEMOP_DEF(iemOp_ud2)
1884{
1885 IEMOP_MNEMONIC(ud2, "ud2");
1886 return IEMOP_RAISE_INVALID_OPCODE();
1887}
1888
1889/** Opcode 0x0f 0x0d. */
1890FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1891{
1892 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1893 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1894 {
1895 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1896 return IEMOP_RAISE_INVALID_OPCODE();
1897 }
1898
1899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1900 if (IEM_IS_MODRM_REG_MODE(bRm))
1901 {
1902 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1903 return IEMOP_RAISE_INVALID_OPCODE();
1904 }
1905
1906 switch (IEM_GET_MODRM_REG_8(bRm))
1907 {
1908 case 2: /* Aliased to /0 for the time being. */
1909 case 4: /* Aliased to /0 for the time being. */
1910 case 5: /* Aliased to /0 for the time being. */
1911 case 6: /* Aliased to /0 for the time being. */
1912 case 7: /* Aliased to /0 for the time being. */
1913 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1914 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1915 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1916 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1917 }
1918
1919 IEM_MC_BEGIN(0, 1);
1920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1921 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1923 /* Currently a NOP. */
1924 NOREF(GCPtrEffSrc);
1925 IEM_MC_ADVANCE_RIP();
1926 IEM_MC_END();
1927 return VINF_SUCCESS;
1928}
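
/*
 * Encoding example: 0f 0d 08 decodes as mod=0, reg=1, rm=0, i.e.
 * prefetchw byte ptr [rax] (or [eax]); without the 3DNow! prefetch
 * feature bit the whole group raises #UD instead of acting as a NOP.
 */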
1929
1930
1931/** Opcode 0x0f 0x0e. */
1932FNIEMOP_DEF(iemOp_femms)
1933{
1934 IEMOP_MNEMONIC(femms, "femms");
1935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1936
1937 IEM_MC_BEGIN(0,0);
1938 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
1939 IEM_MC_MAYBE_RAISE_FPU_XCPT();
1940 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
1941 IEM_MC_FPU_FROM_MMX_MODE();
1942 IEM_MC_ADVANCE_RIP();
1943 IEM_MC_END();
1944 return VINF_SUCCESS;
1945}
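
/*
 * FEMMS is AMD's faster EMMS: both leave MMX mode by marking all FPU tag
 * word entries empty, but FEMMS leaves the register contents undefined.
 * For an interpreter the distinction is moot, hence the plain
 * IEM_MC_FPU_FROM_MMX_MODE() above.
 */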
1946
1947
1948/** Opcode 0x0f 0x0f. */
1949FNIEMOP_DEF(iemOp_3Dnow)
1950{
1951 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1952 {
1953 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1954 return IEMOP_RAISE_INVALID_OPCODE();
1955 }
1956
1957#ifdef IEM_WITH_3DNOW
1958 /* This is pretty sparse, use switch instead of table. */
1959 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1960 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
1961#else
1962 IEMOP_BITCH_ABOUT_STUB();
1963 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1964#endif
1965}
1966
1967
1968/**
1969 * @opcode 0x10
1970 * @oppfx none
1971 * @opcpuid sse
1972 * @opgroup og_sse_simdfp_datamove
1973 * @opxcpttype 4UA
1974 * @optest op1=1 op2=2 -> op1=2
1975 * @optest op1=0 op2=-22 -> op1=-22
1976 */
1977FNIEMOP_DEF(iemOp_movups_Vps_Wps)
1978{
1979 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1980 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1981 if (IEM_IS_MODRM_REG_MODE(bRm))
1982 {
1983 /*
1984 * Register, register.
1985 */
1986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1987 IEM_MC_BEGIN(0, 0);
1988 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1989 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1990 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
1991 IEM_GET_MODRM_RM(pVCpu, bRm));
1992 IEM_MC_ADVANCE_RIP();
1993 IEM_MC_END();
1994 }
1995 else
1996 {
1997 /*
1998 * Memory, register.
1999 */
2000 IEM_MC_BEGIN(0, 2);
2001 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2003
2004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2006 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2007 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2008
2009 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2010 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2011
2012 IEM_MC_ADVANCE_RIP();
2013 IEM_MC_END();
2014 }
2015 return VINF_SUCCESS;
2017}
2018
2019
2020/**
2021 * @opcode 0x10
2022 * @oppfx 0x66
2023 * @opcpuid sse2
2024 * @opgroup og_sse2_pcksclr_datamove
2025 * @opxcpttype 4UA
2026 * @optest op1=1 op2=2 -> op1=2
2027 * @optest op1=0 op2=-42 -> op1=-42
2028 */
2029FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2030{
2031 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2032 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2033 if (IEM_IS_MODRM_REG_MODE(bRm))
2034 {
2035 /*
2036 * Register, register.
2037 */
2038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2039 IEM_MC_BEGIN(0, 0);
2040 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2041 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2042 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2043 IEM_GET_MODRM_RM(pVCpu, bRm));
2044 IEM_MC_ADVANCE_RIP();
2045 IEM_MC_END();
2046 }
2047 else
2048 {
2049 /*
2050 * Memory, register.
2051 */
2052 IEM_MC_BEGIN(0, 2);
2053 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2055
2056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2058 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2059 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2060
2061 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2062 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2063
2064 IEM_MC_ADVANCE_RIP();
2065 IEM_MC_END();
2066 }
2067 return VINF_SUCCESS;
2068}
2069
2070
2071/**
2072 * @opcode 0x10
2073 * @oppfx 0xf3
2074 * @opcpuid sse
2075 * @opgroup og_sse_simdfp_datamove
2076 * @opxcpttype 5
2077 * @optest op1=1 op2=2 -> op1=2
2078 * @optest op1=0 op2=-22 -> op1=-22
2079 */
2080FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2081{
2082 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2083 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2084 if (IEM_IS_MODRM_REG_MODE(bRm))
2085 {
2086 /*
2087 * Register, register.
2088 */
2089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2090 IEM_MC_BEGIN(0, 1);
2091 IEM_MC_LOCAL(uint32_t, uSrc);
2092
2093 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2094 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2095 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2096 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2097
2098 IEM_MC_ADVANCE_RIP();
2099 IEM_MC_END();
2100 }
2101 else
2102 {
2103 /*
2104 * Memory, register.
2105 */
2106 IEM_MC_BEGIN(0, 2);
2107 IEM_MC_LOCAL(uint32_t, uSrc);
2108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2109
2110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2112 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2113 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2114
2115 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2116 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2117
2118 IEM_MC_ADVANCE_RIP();
2119 IEM_MC_END();
2120 }
2121 return VINF_SUCCESS;
2122}
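
/*
 * Note the asymmetry above, which matches the SDM: the register form only
 * replaces the low dword and keeps bits 127:32 of the destination
 * (IEM_MC_STORE_XREG_U32), while the memory form zero extends through bit
 * 127 (IEM_MC_STORE_XREG_U32_ZX_U128). As a sketch over RTUINT128U:
 *
 *      reg form:  pDst->au32[0] = uSrc;    // au32[1..3] preserved
 *      mem form:  pDst->au32[0] = uSrc;
 *                 pDst->au32[1] = pDst->au32[2] = pDst->au32[3] = 0;
 */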
2123
2124
2125/**
2126 * @opcode 0x10
2127 * @oppfx 0xf2
2128 * @opcpuid sse2
2129 * @opgroup og_sse2_pcksclr_datamove
2130 * @opxcpttype 5
2131 * @optest op1=1 op2=2 -> op1=2
2132 * @optest op1=0 op2=-42 -> op1=-42
2133 */
2134FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2135{
2136 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2138 if (IEM_IS_MODRM_REG_MODE(bRm))
2139 {
2140 /*
2141 * Register, register.
2142 */
2143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2144 IEM_MC_BEGIN(0, 1);
2145 IEM_MC_LOCAL(uint64_t, uSrc);
2146
2147 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2148 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2149 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2150 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2151
2152 IEM_MC_ADVANCE_RIP();
2153 IEM_MC_END();
2154 }
2155 else
2156 {
2157 /*
2158 * Memory, register.
2159 */
2160 IEM_MC_BEGIN(0, 2);
2161 IEM_MC_LOCAL(uint64_t, uSrc);
2162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2163
2164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2166 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2167 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2168
2169 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2170 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2171
2172 IEM_MC_ADVANCE_RIP();
2173 IEM_MC_END();
2174 }
2175 return VINF_SUCCESS;
2176}
2177
2178
2179/**
2180 * @opcode 0x11
2181 * @oppfx none
2182 * @opcpuid sse
2183 * @opgroup og_sse_simdfp_datamove
2184 * @opxcpttype 4UA
2185 * @optest op1=1 op2=2 -> op1=2
2186 * @optest op1=0 op2=-42 -> op1=-42
2187 */
2188FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2189{
2190 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2191 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2192 if (IEM_IS_MODRM_REG_MODE(bRm))
2193 {
2194 /*
2195 * Register, register.
2196 */
2197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2198 IEM_MC_BEGIN(0, 0);
2199 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2200 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2201 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2202 IEM_GET_MODRM_REG(pVCpu, bRm));
2203 IEM_MC_ADVANCE_RIP();
2204 IEM_MC_END();
2205 }
2206 else
2207 {
2208 /*
2209 * Memory, register.
2210 */
2211 IEM_MC_BEGIN(0, 2);
2212 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2214
2215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2217 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2218 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2219
2220 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2221 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2222
2223 IEM_MC_ADVANCE_RIP();
2224 IEM_MC_END();
2225 }
2226 return VINF_SUCCESS;
2227}
2228
2229
2230/**
2231 * @opcode 0x11
2232 * @oppfx 0x66
2233 * @opcpuid sse2
2234 * @opgroup og_sse2_pcksclr_datamove
2235 * @opxcpttype 4UA
2236 * @optest op1=1 op2=2 -> op1=2
2237 * @optest op1=0 op2=-42 -> op1=-42
2238 */
2239FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2240{
2241 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2242 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2243 if (IEM_IS_MODRM_REG_MODE(bRm))
2244 {
2245 /*
2246 * Register, register.
2247 */
2248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2249 IEM_MC_BEGIN(0, 0);
2250 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2251 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2252 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2253 IEM_GET_MODRM_REG(pVCpu, bRm));
2254 IEM_MC_ADVANCE_RIP();
2255 IEM_MC_END();
2256 }
2257 else
2258 {
2259 /*
2260 * Memory, register.
2261 */
2262 IEM_MC_BEGIN(0, 2);
2263 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2265
2266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2268 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2269 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2270
2271 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2272 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2273
2274 IEM_MC_ADVANCE_RIP();
2275 IEM_MC_END();
2276 }
2277 return VINF_SUCCESS;
2278}
2279
2280
2281/**
2282 * @opcode 0x11
2283 * @oppfx 0xf3
2284 * @opcpuid sse
2285 * @opgroup og_sse_simdfp_datamove
2286 * @opxcpttype 5
2287 * @optest op1=1 op2=2 -> op1=2
2288 * @optest op1=0 op2=-22 -> op1=-22
2289 */
2290FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2291{
2292 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2293 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2294 if (IEM_IS_MODRM_REG_MODE(bRm))
2295 {
2296 /*
2297 * Register, register.
2298 */
2299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2300 IEM_MC_BEGIN(0, 1);
2301 IEM_MC_LOCAL(uint32_t, uSrc);
2302
2303 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2304 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2305 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2306 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2307
2308 IEM_MC_ADVANCE_RIP();
2309 IEM_MC_END();
2310 }
2311 else
2312 {
2313 /*
2314 * Memory, register.
2315 */
2316 IEM_MC_BEGIN(0, 2);
2317 IEM_MC_LOCAL(uint32_t, uSrc);
2318 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2319
2320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2322 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2323 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2324
2325 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2326 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2327
2328 IEM_MC_ADVANCE_RIP();
2329 IEM_MC_END();
2330 }
2331 return VINF_SUCCESS;
2332}
2333
2334
2335/**
2336 * @opcode 0x11
2337 * @oppfx 0xf2
2338 * @opcpuid sse2
2339 * @opgroup og_sse2_pcksclr_datamove
2340 * @opxcpttype 5
2341 * @optest op1=1 op2=2 -> op1=2
2342 * @optest op1=0 op2=-42 -> op1=-42
2343 */
2344FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2345{
2346 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2348 if (IEM_IS_MODRM_REG_MODE(bRm))
2349 {
2350 /*
2351 * Register, register.
2352 */
2353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2354 IEM_MC_BEGIN(0, 1);
2355 IEM_MC_LOCAL(uint64_t, uSrc);
2356
2357 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2358 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2359 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2360 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
2361
2362 IEM_MC_ADVANCE_RIP();
2363 IEM_MC_END();
2364 }
2365 else
2366 {
2367 /*
2368 * Memory, register.
2369 */
2370 IEM_MC_BEGIN(0, 2);
2371 IEM_MC_LOCAL(uint64_t, uSrc);
2372 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2373
2374 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2376 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2377 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2378
2379 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2380 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2381
2382 IEM_MC_ADVANCE_RIP();
2383 IEM_MC_END();
2384 }
2385 return VINF_SUCCESS;
2386}
2387
2388
2389FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2390{
2391 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2392 if (IEM_IS_MODRM_REG_MODE(bRm))
2393 {
2394 /**
2395 * @opcode 0x12
2396 * @opcodesub 11 mr/reg
2397 * @oppfx none
2398 * @opcpuid sse
2399 * @opgroup og_sse_simdfp_datamove
2400 * @opxcpttype 5
2401 * @optest op1=1 op2=2 -> op1=2
2402 * @optest op1=0 op2=-42 -> op1=-42
2403 */
2404 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2405
2406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2407 IEM_MC_BEGIN(0, 1);
2408 IEM_MC_LOCAL(uint64_t, uSrc);
2409
2410 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2411 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2412 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2413 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2414
2415 IEM_MC_ADVANCE_RIP();
2416 IEM_MC_END();
2417 }
2418 else
2419 {
2420 /**
2421 * @opdone
2422 * @opcode 0x12
2423 * @opcodesub !11 mr/reg
2424 * @oppfx none
2425 * @opcpuid sse
2426 * @opgroup og_sse_simdfp_datamove
2427 * @opxcpttype 5
2428 * @optest op1=1 op2=2 -> op1=2
2429 * @optest op1=0 op2=-42 -> op1=-42
2430         * @opfunction iemOp_movlps_Vq_Mq__movhlps
2431 */
2432 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2433
2434 IEM_MC_BEGIN(0, 2);
2435 IEM_MC_LOCAL(uint64_t, uSrc);
2436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2437
2438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2440 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2441 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2442
2443 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2444 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2445
2446 IEM_MC_ADVANCE_RIP();
2447 IEM_MC_END();
2448 }
2449 return VINF_SUCCESS;
2450}
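
/*
 * Data flow of the two forms above, with the XMM registers viewed as
 * RTUINT128U (sketch only):
 *
 *      movhlps xmm1, xmm2:   xmm1.au64[0] = xmm2.au64[1];  // high -> low
 *      movlps  xmm1, [mem]:  xmm1.au64[0] = *(uint64_t const *)mem;
 *
 * Either way xmm1.au64[1] is left untouched.
 */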
2451
2452
2453/**
2454 * @opcode 0x12
2455 * @opcodesub !11 mr/reg
2456 * @oppfx 0x66
2457 * @opcpuid sse2
2458 * @opgroup og_sse2_pcksclr_datamove
2459 * @opxcpttype 5
2460 * @optest op1=1 op2=2 -> op1=2
2461 * @optest op1=0 op2=-42 -> op1=-42
2462 */
2463FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2464{
2465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2466 if (IEM_IS_MODRM_MEM_MODE(bRm))
2467 {
2468 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2469
2470 IEM_MC_BEGIN(0, 2);
2471 IEM_MC_LOCAL(uint64_t, uSrc);
2472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2473
2474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2476 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2477 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2478
2479 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2480 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2481
2482 IEM_MC_ADVANCE_RIP();
2483 IEM_MC_END();
2484 return VINF_SUCCESS;
2485 }
2486
2487 /**
2488 * @opdone
2489 * @opmnemonic ud660f12m3
2490 * @opcode 0x12
2491 * @opcodesub 11 mr/reg
2492 * @oppfx 0x66
2493 * @opunused immediate
2494 * @opcpuid sse
2495 * @optest ->
2496 */
2497 return IEMOP_RAISE_INVALID_OPCODE();
2498}
2499
2500
2501/**
2502 * @opcode 0x12
2503 * @oppfx 0xf3
2504 * @opcpuid sse3
2505 * @opgroup og_sse3_pcksclr_datamove
2506 * @opxcpttype 4
2507 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2508 * op1=0x00000002000000020000000100000001
2509 */
2510FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2511{
2512 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2513 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2514 if (IEM_IS_MODRM_REG_MODE(bRm))
2515 {
2516 /*
2517 * Register, register.
2518 */
2519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2520 IEM_MC_BEGIN(2, 0);
2521 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2522 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2523
2524 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2525 IEM_MC_PREPARE_SSE_USAGE();
2526
2527 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2528 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2529 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2530
2531 IEM_MC_ADVANCE_RIP();
2532 IEM_MC_END();
2533 }
2534 else
2535 {
2536 /*
2537 * Register, memory.
2538 */
2539 IEM_MC_BEGIN(2, 2);
2540 IEM_MC_LOCAL(RTUINT128U, uSrc);
2541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2542 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2543 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2544
2545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2547 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2548 IEM_MC_PREPARE_SSE_USAGE();
2549
2550 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2551 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2552 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
2553
2554 IEM_MC_ADVANCE_RIP();
2555 IEM_MC_END();
2556 }
2557 return VINF_SUCCESS;
2558}
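
/*
 * What iemAImpl_movsldup computes, as a plain C sketch (the @optest above
 * encodes the same example):
 *
 *      puDst->au32[0] = puSrc->au32[0];
 *      puDst->au32[1] = puSrc->au32[0];
 *      puDst->au32[2] = puSrc->au32[2];
 *      puDst->au32[3] = puSrc->au32[2];
 */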
2559
2560
2561/**
2562 * @opcode 0x12
2563 * @oppfx 0xf2
2564 * @opcpuid sse3
2565 * @opgroup og_sse3_pcksclr_datamove
2566 * @opxcpttype 5
2567 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2568 * op1=0x22222222111111112222222211111111
2569 */
2570FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2571{
2572 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2573 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2574 if (IEM_IS_MODRM_REG_MODE(bRm))
2575 {
2576 /*
2577 * Register, register.
2578 */
2579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2580 IEM_MC_BEGIN(2, 0);
2581 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2582 IEM_MC_ARG(uint64_t, uSrc, 1);
2583
2584 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2585 IEM_MC_PREPARE_SSE_USAGE();
2586
2587 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2588 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2589 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2590
2591 IEM_MC_ADVANCE_RIP();
2592 IEM_MC_END();
2593 }
2594 else
2595 {
2596 /*
2597 * Register, memory.
2598 */
2599 IEM_MC_BEGIN(2, 2);
2600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2601 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2602 IEM_MC_ARG(uint64_t, uSrc, 1);
2603
2604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2606 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2607 IEM_MC_PREPARE_SSE_USAGE();
2608
2609 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2610 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2611 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
2612
2613 IEM_MC_ADVANCE_RIP();
2614 IEM_MC_END();
2615 }
2616 return VINF_SUCCESS;
2617}
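
/*
 * MOVDDUP just broadcasts the low qword (sketch):
 *
 *      puDst->au64[0] = uSrc;
 *      puDst->au64[1] = uSrc;
 *
 * which is why the worker takes the 64-bit source by value instead of a
 * full 128-bit reference.
 */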
2618
2619
2620/**
2621 * @opcode 0x13
2622 * @opcodesub !11 mr/reg
2623 * @oppfx none
2624 * @opcpuid sse
2625 * @opgroup og_sse_simdfp_datamove
2626 * @opxcpttype 5
2627 * @optest op1=1 op2=2 -> op1=2
2628 * @optest op1=0 op2=-42 -> op1=-42
2629 */
2630FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2631{
2632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2633 if (IEM_IS_MODRM_MEM_MODE(bRm))
2634 {
2635 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2636
2637 IEM_MC_BEGIN(0, 2);
2638 IEM_MC_LOCAL(uint64_t, uSrc);
2639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2640
2641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2643 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2644 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2645
2646 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2647 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2648
2649 IEM_MC_ADVANCE_RIP();
2650 IEM_MC_END();
2651 return VINF_SUCCESS;
2652 }
2653
2654 /**
2655 * @opdone
2656 * @opmnemonic ud0f13m3
2657 * @opcode 0x13
2658 * @opcodesub 11 mr/reg
2659 * @oppfx none
2660 * @opunused immediate
2661 * @opcpuid sse
2662 * @optest ->
2663 */
2664 return IEMOP_RAISE_INVALID_OPCODE();
2665}
2666
2667
2668/**
2669 * @opcode 0x13
2670 * @opcodesub !11 mr/reg
2671 * @oppfx 0x66
2672 * @opcpuid sse2
2673 * @opgroup og_sse2_pcksclr_datamove
2674 * @opxcpttype 5
2675 * @optest op1=1 op2=2 -> op1=2
2676 * @optest op1=0 op2=-42 -> op1=-42
2677 */
2678FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2679{
2680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2681 if (IEM_IS_MODRM_MEM_MODE(bRm))
2682 {
2683 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2684 IEM_MC_BEGIN(0, 2);
2685 IEM_MC_LOCAL(uint64_t, uSrc);
2686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2687
2688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2690 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2691 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2692
2693 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2694 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2695
2696 IEM_MC_ADVANCE_RIP();
2697 IEM_MC_END();
2698 return VINF_SUCCESS;
2699 }
2700
2701 /**
2702 * @opdone
2703 * @opmnemonic ud660f13m3
2704 * @opcode 0x13
2705 * @opcodesub 11 mr/reg
2706 * @oppfx 0x66
2707 * @opunused immediate
2708 * @opcpuid sse
2709 * @optest ->
2710 */
2711 return IEMOP_RAISE_INVALID_OPCODE();
2712}
2713
2714
2715/**
2716 * @opmnemonic udf30f13
2717 * @opcode 0x13
2718 * @oppfx 0xf3
2719 * @opunused intel-modrm
2720 * @opcpuid sse
2721 * @optest ->
2722 * @opdone
2723 */
2724
2725/**
2726 * @opmnemonic udf20f13
2727 * @opcode 0x13
2728 * @oppfx 0xf2
2729 * @opunused intel-modrm
2730 * @opcpuid sse
2731 * @optest ->
2732 * @opdone
2733 */
2734
2735/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2736FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2737{
2738 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2739 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2740}
2741
2742
2743/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2744FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2745{
2746 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2747 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2748}
2749
2750
2751/**
2752 * @opdone
2753 * @opmnemonic udf30f14
2754 * @opcode 0x14
2755 * @oppfx 0xf3
2756 * @opunused intel-modrm
2757 * @opcpuid sse
2758 * @optest ->
2759 * @opdone
2760 */
2761
2762/**
2763 * @opmnemonic udf20f14
2764 * @opcode 0x14
2765 * @oppfx 0xf2
2766 * @opunused intel-modrm
2767 * @opcpuid sse
2768 * @optest ->
2769 * @opdone
2770 */
2771
2772/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2773FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2774{
2775 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2776 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2777}
2778
2779
2780/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2781FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2782{
2783 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
2784 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2785}
2786
2787
2788/* Opcode 0xf3 0x0f 0x15 - invalid */
2789/* Opcode 0xf2 0x0f 0x15 - invalid */
2790
2791/**
2792 * @opdone
2793 * @opmnemonic udf30f15
2794 * @opcode 0x15
2795 * @oppfx 0xf3
2796 * @opunused intel-modrm
2797 * @opcpuid sse
2798 * @optest ->
2799 * @opdone
2800 */
2801
2802/**
2803 * @opmnemonic udf20f15
2804 * @opcode 0x15
2805 * @oppfx 0xf2
2806 * @opunused intel-modrm
2807 * @opcpuid sse
2808 * @optest ->
2809 * @opdone
2810 */
2811
2812FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2813{
2814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2815 if (IEM_IS_MODRM_REG_MODE(bRm))
2816 {
2817 /**
2818 * @opcode 0x16
2819 * @opcodesub 11 mr/reg
2820 * @oppfx none
2821 * @opcpuid sse
2822 * @opgroup og_sse_simdfp_datamove
2823 * @opxcpttype 5
2824 * @optest op1=1 op2=2 -> op1=2
2825 * @optest op1=0 op2=-42 -> op1=-42
2826 */
2827 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2828
2829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2830 IEM_MC_BEGIN(0, 1);
2831 IEM_MC_LOCAL(uint64_t, uSrc);
2832
2833 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2834 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2835 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2836 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2837
2838 IEM_MC_ADVANCE_RIP();
2839 IEM_MC_END();
2840 }
2841 else
2842 {
2843 /**
2844 * @opdone
2845 * @opcode 0x16
2846 * @opcodesub !11 mr/reg
2847 * @oppfx none
2848 * @opcpuid sse
2849 * @opgroup og_sse_simdfp_datamove
2850 * @opxcpttype 5
2851 * @optest op1=1 op2=2 -> op1=2
2852 * @optest op1=0 op2=-42 -> op1=-42
2853 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2854 */
2855 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2856
2857 IEM_MC_BEGIN(0, 2);
2858 IEM_MC_LOCAL(uint64_t, uSrc);
2859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2860
2861 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2863 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2864 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2865
2866 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2867 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2868
2869 IEM_MC_ADVANCE_RIP();
2870 IEM_MC_END();
2871 }
2872 return VINF_SUCCESS;
2873}
2874
2875
2876/**
2877 * @opcode 0x16
2878 * @opcodesub !11 mr/reg
2879 * @oppfx 0x66
2880 * @opcpuid sse2
2881 * @opgroup og_sse2_pcksclr_datamove
2882 * @opxcpttype 5
2883 * @optest op1=1 op2=2 -> op1=2
2884 * @optest op1=0 op2=-42 -> op1=-42
2885 */
2886FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2887{
2888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2889 if (IEM_IS_MODRM_MEM_MODE(bRm))
2890 {
2891 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2892 IEM_MC_BEGIN(0, 2);
2893 IEM_MC_LOCAL(uint64_t, uSrc);
2894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2895
2896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2898 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2899 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2900
2901 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2902 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2903
2904 IEM_MC_ADVANCE_RIP();
2905 IEM_MC_END();
2906 return VINF_SUCCESS;
2907 }
2908
2909 /**
2910 * @opdone
2911 * @opmnemonic ud660f16m3
2912 * @opcode 0x16
2913 * @opcodesub 11 mr/reg
2914 * @oppfx 0x66
2915 * @opunused immediate
2916 * @opcpuid sse
2917 * @optest ->
2918 */
2919 return IEMOP_RAISE_INVALID_OPCODE();
2920}
2921
2922
2923/**
2924 * @opcode 0x16
2925 * @oppfx 0xf3
2926 * @opcpuid sse3
2927 * @opgroup og_sse3_pcksclr_datamove
2928 * @opxcpttype 4
2929 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
2930 * op1=0x00000002000000020000000100000001
2931 */
2932FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
2933{
2934 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2936 if (IEM_IS_MODRM_REG_MODE(bRm))
2937 {
2938 /*
2939 * Register, register.
2940 */
2941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2942 IEM_MC_BEGIN(2, 0);
2943 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2944 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2945
2946 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2947 IEM_MC_PREPARE_SSE_USAGE();
2948
2949 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2950 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2951 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2952
2953 IEM_MC_ADVANCE_RIP();
2954 IEM_MC_END();
2955 }
2956 else
2957 {
2958 /*
2959 * Register, memory.
2960 */
2961 IEM_MC_BEGIN(2, 2);
2962 IEM_MC_LOCAL(RTUINT128U, uSrc);
2963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2964 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2965 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2966
2967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2969 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
2970 IEM_MC_PREPARE_SSE_USAGE();
2971
2972 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2973 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2974 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
2975
2976 IEM_MC_ADVANCE_RIP();
2977 IEM_MC_END();
2978 }
2979 return VINF_SUCCESS;
2980}
2981
2982/**
2983 * @opdone
2984 * @opmnemonic udf20f16
2985 * @opcode 0x16
2986 * @oppfx 0xf2
2987 * @opunused intel-modrm
2988 * @opcpuid sse
2989 * @optest ->
2990 * @opdone
2991 */
2992
2993
2994/**
2995 * @opcode 0x17
2996 * @opcodesub !11 mr/reg
2997 * @oppfx none
2998 * @opcpuid sse
2999 * @opgroup og_sse_simdfp_datamove
3000 * @opxcpttype 5
3001 * @optest op1=1 op2=2 -> op1=2
3002 * @optest op1=0 op2=-42 -> op1=-42
3003 */
3004FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3005{
3006 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3007 if (IEM_IS_MODRM_MEM_MODE(bRm))
3008 {
3009 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3010
3011 IEM_MC_BEGIN(0, 2);
3012 IEM_MC_LOCAL(uint64_t, uSrc);
3013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3014
3015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3017 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3018 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3019
3020 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3021 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3022
3023 IEM_MC_ADVANCE_RIP();
3024 IEM_MC_END();
3025 return VINF_SUCCESS;
3026 }
3027
3028 /**
3029 * @opdone
3030 * @opmnemonic ud0f17m3
3031 * @opcode 0x17
3032 * @opcodesub 11 mr/reg
3033 * @oppfx none
3034 * @opunused immediate
3035 * @opcpuid sse
3036 * @optest ->
3037 */
3038 return IEMOP_RAISE_INVALID_OPCODE();
3039}
3040
3041
3042/**
3043 * @opcode 0x17
3044 * @opcodesub !11 mr/reg
3045 * @oppfx 0x66
3046 * @opcpuid sse2
3047 * @opgroup og_sse2_pcksclr_datamove
3048 * @opxcpttype 5
3049 * @optest op1=1 op2=2 -> op1=2
3050 * @optest op1=0 op2=-42 -> op1=-42
3051 */
3052FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3053{
3054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3055 if (IEM_IS_MODRM_MEM_MODE(bRm))
3056 {
3057 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3058
3059 IEM_MC_BEGIN(0, 2);
3060 IEM_MC_LOCAL(uint64_t, uSrc);
3061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3062
3063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3065        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3066 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3067
3068 IEM_MC_FETCH_XREG_HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3069 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3070
3071 IEM_MC_ADVANCE_RIP();
3072 IEM_MC_END();
3073 return VINF_SUCCESS;
3074 }
3075
3076 /**
3077 * @opdone
3078 * @opmnemonic ud660f17m3
3079 * @opcode 0x17
3080 * @opcodesub 11 mr/reg
3081 * @oppfx 0x66
3082 * @opunused immediate
3083 * @opcpuid sse
3084 * @optest ->
3085 */
3086 return IEMOP_RAISE_INVALID_OPCODE();
3087}
3088
3089
3090/**
3091 * @opdone
3092 * @opmnemonic udf30f17
3093 * @opcode 0x17
3094 * @oppfx 0xf3
3095 * @opunused intel-modrm
3096 * @opcpuid sse
3097 * @optest ->
3098 * @opdone
3099 */
3100
3101/**
3102 * @opmnemonic udf20f17
3103 * @opcode 0x17
3104 * @oppfx 0xf2
3105 * @opunused intel-modrm
3106 * @opcpuid sse
3107 * @optest ->
3108 * @opdone
3109 */
3110
3111
3112/** Opcode 0x0f 0x18. */
3113FNIEMOP_DEF(iemOp_prefetch_Grp16)
3114{
3115 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3116 if (IEM_IS_MODRM_MEM_MODE(bRm))
3117 {
3118 switch (IEM_GET_MODRM_REG_8(bRm))
3119 {
3120 case 4: /* Aliased to /0 for the time being according to AMD. */
3121 case 5: /* Aliased to /0 for the time being according to AMD. */
3122 case 6: /* Aliased to /0 for the time being according to AMD. */
3123 case 7: /* Aliased to /0 for the time being according to AMD. */
3124 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3125 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3126 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3127 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3128 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3129 }
3130
3131 IEM_MC_BEGIN(0, 1);
3132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3135 /* Currently a NOP. */
3136 NOREF(GCPtrEffSrc);
3137 IEM_MC_ADVANCE_RIP();
3138 IEM_MC_END();
3139 return VINF_SUCCESS;
3140 }
3141
3142 return IEMOP_RAISE_INVALID_OPCODE();
3143}
3144
3145
3146/** Opcode 0x0f 0x19..0x1f. */
3147FNIEMOP_DEF(iemOp_nop_Ev)
3148{
3149 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3151 if (IEM_IS_MODRM_REG_MODE(bRm))
3152 {
3153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3154 IEM_MC_BEGIN(0, 0);
3155 IEM_MC_ADVANCE_RIP();
3156 IEM_MC_END();
3157 }
3158 else
3159 {
3160 IEM_MC_BEGIN(0, 1);
3161 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3164 /* Currently a NOP. */
3165 NOREF(GCPtrEffSrc);
3166 IEM_MC_ADVANCE_RIP();
3167 IEM_MC_END();
3168 }
3169 return VINF_SUCCESS;
3170}
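
/*
 * 0f 1f /0 is the officially recommended multi-byte NOP, e.g. from the
 * SDM's recommended sequences:
 *
 *      66 0f 1f 44 00 00          ; 6 bytes: nop word ptr [rax+rax+0]
 *      0f 1f 84 00 00 00 00 00    ; 8 bytes: nop dword ptr [rax+rax+0]
 *
 * The ModR/M operand is fully decoded above but never accessed.
 */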
3171
3172
3173/** Opcode 0x0f 0x20. */
3174FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3175{
3176    /* mod is ignored, as are operand size overrides. */
3177 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3178 IEMOP_HLP_MIN_386();
3179 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3180 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3181 else
3182 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3183
3184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3185 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3186 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3187 {
3188 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3189 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3190 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3191 iCrReg |= 8;
3192 }
3193 switch (iCrReg)
3194 {
3195 case 0: case 2: case 3: case 4: case 8:
3196 break;
3197 default:
3198 return IEMOP_RAISE_INVALID_OPCODE();
3199 }
3200 IEMOP_HLP_DONE_DECODING();
3201
3202 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3203}
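
/*
 * The LOCK prefix handling above implements the AMD alternative CR8
 * encoding for 32-bit code:
 *
 *      f0 0f 20 c0     ; lock mov eax, cr0 == mov eax, cr8
 *
 * On CPUs without that feature the prefixed form is #UD, which is why the
 * fMovCr8In32Bit check precedes the iCrReg |= 8 adjustment.
 */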
3204
3205
3206/** Opcode 0x0f 0x21. */
3207FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3208{
3209 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3210 IEMOP_HLP_MIN_386();
3211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3213 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3214 return IEMOP_RAISE_INVALID_OPCODE();
3215 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
3216 IEM_GET_MODRM_RM(pVCpu, bRm),
3217 IEM_GET_MODRM_REG_8(bRm));
3218}
3219
3220
3221/** Opcode 0x0f 0x22. */
3222FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3223{
3224    /* mod is ignored, as are operand size overrides. */
3225 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3226 IEMOP_HLP_MIN_386();
3227 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3228 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3229 else
3230 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3231
3232 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3233 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3234 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3235 {
3236 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3237 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3238 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
3239 iCrReg |= 8;
3240 }
3241 switch (iCrReg)
3242 {
3243 case 0: case 2: case 3: case 4: case 8:
3244 break;
3245 default:
3246 return IEMOP_RAISE_INVALID_OPCODE();
3247 }
3248 IEMOP_HLP_DONE_DECODING();
3249
3250 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3251}
3252
3253
3254/** Opcode 0x0f 0x23. */
3255FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3256{
3257 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3258 IEMOP_HLP_MIN_386();
3259 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3261 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3262 return IEMOP_RAISE_INVALID_OPCODE();
3263 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
3264 IEM_GET_MODRM_REG_8(bRm),
3265 IEM_GET_MODRM_RM(pVCpu, bRm));
3266}
3267
3268
3269/** Opcode 0x0f 0x24. */
3270FNIEMOP_DEF(iemOp_mov_Rd_Td)
3271{
3272 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3273 IEMOP_HLP_MIN_386();
3274 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3276 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3277 return IEMOP_RAISE_INVALID_OPCODE();
3278 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Td,
3279 IEM_GET_MODRM_RM(pVCpu, bRm),
3280 IEM_GET_MODRM_REG_8(bRm));
3281}
3282
3283
3284/** Opcode 0x0f 0x26. */
3285FNIEMOP_DEF(iemOp_mov_Td_Rd)
3286{
3287 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3288 IEMOP_HLP_MIN_386();
3289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3291 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3292 return IEMOP_RAISE_INVALID_OPCODE();
3293 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Td_Rd,
3294 IEM_GET_MODRM_REG_8(bRm),
3295 IEM_GET_MODRM_RM(pVCpu, bRm));
3296}
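
/*
 * The test registers (tr6/tr7 on the 386, tr3..tr7 on the 486) are gone
 * from the Pentium onwards, where opcodes 0x0f 0x24 and 0x0f 0x26 raise
 * #UD; that is what the IEMTARGETCPU_PENTIUM checks above reproduce.
 */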
3297
3298
3299/**
3300 * @opcode 0x28
3301 * @oppfx none
3302 * @opcpuid sse
3303 * @opgroup og_sse_simdfp_datamove
3304 * @opxcpttype 1
3305 * @optest op1=1 op2=2 -> op1=2
3306 * @optest op1=0 op2=-42 -> op1=-42
3307 */
3308FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3309{
3310 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3311 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3312 if (IEM_IS_MODRM_REG_MODE(bRm))
3313 {
3314 /*
3315 * Register, register.
3316 */
3317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3318 IEM_MC_BEGIN(0, 0);
3319 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3320 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3321 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3322 IEM_GET_MODRM_RM(pVCpu, bRm));
3323 IEM_MC_ADVANCE_RIP();
3324 IEM_MC_END();
3325 }
3326 else
3327 {
3328 /*
3329 * Register, memory.
3330 */
3331 IEM_MC_BEGIN(0, 2);
3332 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3334
3335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3337 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3338 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3339
3340 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3341 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3342
3343 IEM_MC_ADVANCE_RIP();
3344 IEM_MC_END();
3345 }
3346 return VINF_SUCCESS;
3347}
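
/*
 * Unlike the movups worker, the memory path here uses
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE, i.e. it performs the 16-byte alignment
 * check movaps requires. A rough sketch of the extra step:
 *
 *      if (GCPtrEffSrc & 15)
 *          return iemRaiseGeneralProtectionFault0(pVCpu);
 *
 * (The real check sits inside the IEM memory access code.)
 */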
3348
3349/**
3350 * @opcode 0x28
3351 * @oppfx 66
3352 * @opcpuid sse2
3353 * @opgroup og_sse2_pcksclr_datamove
3354 * @opxcpttype 1
3355 * @optest op1=1 op2=2 -> op1=2
3356 * @optest op1=0 op2=-42 -> op1=-42
3357 */
3358FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3359{
3360 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3361 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3362 if (IEM_IS_MODRM_REG_MODE(bRm))
3363 {
3364 /*
3365 * Register, register.
3366 */
3367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3368 IEM_MC_BEGIN(0, 0);
3369 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3370 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3371 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3372 IEM_GET_MODRM_RM(pVCpu, bRm));
3373 IEM_MC_ADVANCE_RIP();
3374 IEM_MC_END();
3375 }
3376 else
3377 {
3378 /*
3379 * Register, memory.
3380 */
3381 IEM_MC_BEGIN(0, 2);
3382 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3384
3385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3387 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3388 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3389
3390 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3391 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3392
3393 IEM_MC_ADVANCE_RIP();
3394 IEM_MC_END();
3395 }
3396 return VINF_SUCCESS;
3397}
3398
3399/* Opcode 0xf3 0x0f 0x28 - invalid */
3400/* Opcode 0xf2 0x0f 0x28 - invalid */
3401
3402/**
3403 * @opcode 0x29
3404 * @oppfx none
3405 * @opcpuid sse
3406 * @opgroup og_sse_simdfp_datamove
3407 * @opxcpttype 1
3408 * @optest op1=1 op2=2 -> op1=2
3409 * @optest op1=0 op2=-42 -> op1=-42
3410 */
3411FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3412{
3413 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3415 if (IEM_IS_MODRM_REG_MODE(bRm))
3416 {
3417 /*
3418 * Register, register.
3419 */
3420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3421 IEM_MC_BEGIN(0, 0);
3422 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3423 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3424 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3425 IEM_GET_MODRM_REG(pVCpu, bRm));
3426 IEM_MC_ADVANCE_RIP();
3427 IEM_MC_END();
3428 }
3429 else
3430 {
3431 /*
3432 * Memory, register.
3433 */
3434 IEM_MC_BEGIN(0, 2);
3435 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3437
3438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3440 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3441 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3442
3443 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3444 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3445
3446 IEM_MC_ADVANCE_RIP();
3447 IEM_MC_END();
3448 }
3449 return VINF_SUCCESS;
3450}
3451
3452/**
3453 * @opcode 0x29
3454 * @oppfx 66
3455 * @opcpuid sse2
3456 * @opgroup og_sse2_pcksclr_datamove
3457 * @opxcpttype 1
3458 * @optest op1=1 op2=2 -> op1=2
3459 * @optest op1=0 op2=-42 -> op1=-42
3460 */
3461FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3462{
3463 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3465 if (IEM_IS_MODRM_REG_MODE(bRm))
3466 {
3467 /*
3468 * Register, register.
3469 */
3470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3471 IEM_MC_BEGIN(0, 0);
3472 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3473 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3474 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3475 IEM_GET_MODRM_REG(pVCpu, bRm));
3476 IEM_MC_ADVANCE_RIP();
3477 IEM_MC_END();
3478 }
3479 else
3480 {
3481 /*
3482 * Memory, register.
3483 */
3484 IEM_MC_BEGIN(0, 2);
3485 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3486 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3487
3488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3490 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3491 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3492
3493 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3494 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3495
3496 IEM_MC_ADVANCE_RIP();
3497 IEM_MC_END();
3498 }
3499 return VINF_SUCCESS;
3500}
3501
3502/* Opcode 0xf3 0x0f 0x29 - invalid */
3503/* Opcode 0xf2 0x0f 0x29 - invalid */
3504
3505
3506/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3507FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
3508/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3509FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
3510/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3511FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
3512/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3513FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT
3514
3515
3516/**
3517 * @opcode 0x2b
3518 * @opcodesub !11 mr/reg
3519 * @oppfx none
3520 * @opcpuid sse
3521 * @opgroup og_sse1_cachect
3522 * @opxcpttype 1
3523 * @optest op1=1 op2=2 -> op1=2
3524 * @optest op1=0 op2=-42 -> op1=-42
3525 */
3526FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
3527{
3528 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3530 if (IEM_IS_MODRM_MEM_MODE(bRm))
3531 {
3532 /*
3533         * Memory, register.
3534 */
3535 IEM_MC_BEGIN(0, 2);
3536 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3538
3539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3541 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3542 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3543
3544 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3545 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3546
3547 IEM_MC_ADVANCE_RIP();
3548 IEM_MC_END();
3549 }
3550 /* The register, register encoding is invalid. */
3551 else
3552 return IEMOP_RAISE_INVALID_OPCODE();
3553 return VINF_SUCCESS;
3554}
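
/*
 * MOVNTPS is a streaming store; its non-temporal hint has no
 * architecturally visible effect for an interpreted guest, so the plain
 * aligned 128-bit store above is sufficient. What does matter is the
 * 16-byte alignment check and that the register form is invalid, both of
 * which are handled here.
 */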
3555
3556/**
3557 * @opcode 0x2b
3558 * @opcodesub !11 mr/reg
3559 * @oppfx 0x66
3560 * @opcpuid sse2
3561 * @opgroup og_sse2_cachect
3562 * @opxcpttype 1
3563 * @optest op1=1 op2=2 -> op1=2
3564 * @optest op1=0 op2=-42 -> op1=-42
3565 */
3566FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
3567{
3568 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3569 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3570 if (IEM_IS_MODRM_MEM_MODE(bRm))
3571 {
3572 /*
3573         * Memory, register.
3574 */
3575 IEM_MC_BEGIN(0, 2);
3576 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3578
3579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3581 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3582 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3583
3584 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3585 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3586
3587 IEM_MC_ADVANCE_RIP();
3588 IEM_MC_END();
3589 }
3590 /* The register, register encoding is invalid. */
3591 else
3592 return IEMOP_RAISE_INVALID_OPCODE();
3593 return VINF_SUCCESS;
3594}
3595/* Opcode 0xf3 0x0f 0x2b - invalid */
3596/* Opcode 0xf2 0x0f 0x2b - invalid */
3597
3598
3599/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
3600FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
3601/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
3602FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
3603/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
3604FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
3605/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
3606FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);
3607
3608/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
3609FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
3610/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
3611FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
3612/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
3613FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
3614/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
3615FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
3616
3617/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
3618FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
3619/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
3620FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
3621/* Opcode 0xf3 0x0f 0x2e - invalid */
3622/* Opcode 0xf2 0x0f 0x2e - invalid */
3623
3624/** Opcode 0x0f 0x2f - comiss Vss, Wss */
3625FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
3626/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
3627FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
3628/* Opcode 0xf3 0x0f 0x2f - invalid */
3629/* Opcode 0xf2 0x0f 0x2f - invalid */
3630
3631/** Opcode 0x0f 0x30. */
3632FNIEMOP_DEF(iemOp_wrmsr)
3633{
3634 IEMOP_MNEMONIC(wrmsr, "wrmsr");
3635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3636 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
3637}
3638
3639
3640/** Opcode 0x0f 0x31. */
3641FNIEMOP_DEF(iemOp_rdtsc)
3642{
3643 IEMOP_MNEMONIC(rdtsc, "rdtsc");
3644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3645 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
3646}
3647
3648
3649/** Opcode 0x0f 0x32. */
3650FNIEMOP_DEF(iemOp_rdmsr)
3651{
3652 IEMOP_MNEMONIC(rdmsr, "rdmsr");
3653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3654 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
3655}
3656
3657
3658/** Opcode 0x0f 0x33. */
3659FNIEMOP_DEF(iemOp_rdpmc)
3660{
3661 IEMOP_MNEMONIC(rdpmc, "rdpmc");
3662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3663 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdpmc);
3664}
3665
3666
3667/** Opcode 0x0f 0x34. */
3668FNIEMOP_DEF(iemOp_sysenter)
3669{
3670 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
3671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3672 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysenter);
3673}
3674
3675/** Opcode 0x0f 0x35. */
3676FNIEMOP_DEF(iemOp_sysexit)
3677{
3678 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
3679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3680 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
3681}
3682
3683/** Opcode 0x0f 0x37. */
3684FNIEMOP_STUB(iemOp_getsec);
3685
3686
3687/** Opcode 0x0f 0x38. */
3688FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
3689{
3690#ifdef IEM_WITH_THREE_0F_38
3691 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3692 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
3693#else
3694 IEMOP_BITCH_ABOUT_STUB();
3695 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3696#endif
3697}
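
/*
 * The b * 4 + idxPrefix indexing folds the SIMD prefix into the table
 * lookup; idxPrefix is assumed here to follow the usual IEM table order of
 * 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2. E.g. 66 0f 38 00 (pshufb Vx,Wx)
 * lands on g_apfnThreeByte0f38[0x00 * 4 + 1].
 */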
3698
3699
3700/** Opcode 0x0f 0x3a. */
3701FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
3702{
3703#ifdef IEM_WITH_THREE_0F_3A
3704 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
3705 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
3706#else
3707 IEMOP_BITCH_ABOUT_STUB();
3708 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3709#endif
3710}
3711
3712
3713/**
3714 * Implements a conditional move.
3715 *
3716 * Wish there were an obvious way to do this where we could share and reduce
3717 * code bloat.
3718 *
3719 * @param a_Cnd The conditional "microcode" operation.
3720 */
3721#define CMOV_X(a_Cnd) \
3722 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
3723 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3724 { \
3725 switch (pVCpu->iem.s.enmEffOpSize) \
3726 { \
3727 case IEMMODE_16BIT: \
3728 IEM_MC_BEGIN(0, 1); \
3729 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3730 a_Cnd { \
3731 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3732 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3733 } IEM_MC_ENDIF(); \
3734 IEM_MC_ADVANCE_RIP(); \
3735 IEM_MC_END(); \
3736 return VINF_SUCCESS; \
3737 \
3738 case IEMMODE_32BIT: \
3739 IEM_MC_BEGIN(0, 1); \
3740 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3741 a_Cnd { \
3742 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3743 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3744 } IEM_MC_ELSE() { \
3745 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3746 } IEM_MC_ENDIF(); \
3747 IEM_MC_ADVANCE_RIP(); \
3748 IEM_MC_END(); \
3749 return VINF_SUCCESS; \
3750 \
3751 case IEMMODE_64BIT: \
3752 IEM_MC_BEGIN(0, 1); \
3753 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3754 a_Cnd { \
3755 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3756 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3757 } IEM_MC_ENDIF(); \
3758 IEM_MC_ADVANCE_RIP(); \
3759 IEM_MC_END(); \
3760 return VINF_SUCCESS; \
3761 \
3762 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3763 } \
3764 } \
3765 else \
3766 { \
3767 switch (pVCpu->iem.s.enmEffOpSize) \
3768 { \
3769 case IEMMODE_16BIT: \
3770 IEM_MC_BEGIN(0, 2); \
3771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3772 IEM_MC_LOCAL(uint16_t, u16Tmp); \
3773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3774 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3775 a_Cnd { \
3776 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
3777 } IEM_MC_ENDIF(); \
3778 IEM_MC_ADVANCE_RIP(); \
3779 IEM_MC_END(); \
3780 return VINF_SUCCESS; \
3781 \
3782 case IEMMODE_32BIT: \
3783 IEM_MC_BEGIN(0, 2); \
3784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3785 IEM_MC_LOCAL(uint32_t, u32Tmp); \
3786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3787 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3788 a_Cnd { \
3789 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
3790 } IEM_MC_ELSE() { \
3791 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3792 } IEM_MC_ENDIF(); \
3793 IEM_MC_ADVANCE_RIP(); \
3794 IEM_MC_END(); \
3795 return VINF_SUCCESS; \
3796 \
3797 case IEMMODE_64BIT: \
3798 IEM_MC_BEGIN(0, 2); \
3799 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3800 IEM_MC_LOCAL(uint64_t, u64Tmp); \
3801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3802 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3803 a_Cnd { \
3804 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
3805 } IEM_MC_ENDIF(); \
3806 IEM_MC_ADVANCE_RIP(); \
3807 IEM_MC_END(); \
3808 return VINF_SUCCESS; \
3809 \
3810 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3811 } \
3812 } do {} while (0)
3813
3814
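/*
 * Expansion sketch: for CMOVO (0x0f 0x40) with a 32-bit operand size and a
 * register source, CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)) emits roughly:
 *
 *     IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
 *         IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
 *         IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
 *     } IEM_MC_ELSE() {
 *         IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
 *     } IEM_MC_ENDIF();
 *
 * Note that only the 32-bit cases have an else branch: a 32-bit CMOVcc
 * always writes its destination (zeroing bits 63:32 in 64-bit mode) even
 * when the condition is false, while the 16-bit and 64-bit forms leave the
 * destination untouched in that case.
 */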
3815
3816/** Opcode 0x0f 0x40. */
3817FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
3818{
3819 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
3820 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
3821}
3822
3823
3824/** Opcode 0x0f 0x41. */
3825FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
3826{
3827 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
3828 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
3829}
3830
3831
3832/** Opcode 0x0f 0x42. */
3833FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
3834{
3835 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
3836 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
3837}
3838
3839
3840/** Opcode 0x0f 0x43. */
3841FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
3842{
3843 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
3844 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
3845}
3846
3847
3848/** Opcode 0x0f 0x44. */
3849FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
3850{
3851 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
3852 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
3853}
3854
3855
3856/** Opcode 0x0f 0x45. */
3857FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
3858{
3859 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
3860 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
3861}
3862
3863
3864/** Opcode 0x0f 0x46. */
3865FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
3866{
3867 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
3868 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
3869}
3870
3871
3872/** Opcode 0x0f 0x47. */
3873FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
3874{
3875 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
3876 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
3877}
3878
3879
3880/** Opcode 0x0f 0x48. */
3881FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
3882{
3883 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
3884 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
3885}
3886
3887
3888/** Opcode 0x0f 0x49. */
3889FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
3890{
3891 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
3892 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
3893}
3894
3895
3896/** Opcode 0x0f 0x4a. */
3897FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
3898{
3899 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
3900 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
3901}
3902
3903
3904/** Opcode 0x0f 0x4b. */
3905FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
3906{
3907 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
3908 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
3909}
3910
3911
3912/** Opcode 0x0f 0x4c. */
3913FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
3914{
3915 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
3916 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
3917}
3918
3919
3920/** Opcode 0x0f 0x4d. */
3921FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
3922{
3923 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
3924 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
3925}
3926
3927
3928/** Opcode 0x0f 0x4e. */
3929FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
3930{
3931 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
3932 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
3933}
3934
3935
3936/** Opcode 0x0f 0x4f. */
3937FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
3938{
3939 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
3940 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
3941}
3942
3943#undef CMOV_X
3944
3945/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
3946FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
3947/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
3948FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
3949/* Opcode 0xf3 0x0f 0x50 - invalid */
3950/* Opcode 0xf2 0x0f 0x50 - invalid */
3951
3952/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
3953FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
3954/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
3955FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
3956/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
3957FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
3958/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
3959FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);
3960
3961/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
3962FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
3963/* Opcode 0x66 0x0f 0x52 - invalid */
3964/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
3965FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
3966/* Opcode 0xf2 0x0f 0x52 - invalid */
3967
3968/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
3969FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
3970/* Opcode 0x66 0x0f 0x53 - invalid */
3971/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
3972FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
3973/* Opcode 0xf2 0x0f 0x53 - invalid */
3974
3975
3976/** Opcode 0x0f 0x54 - andps Vps, Wps */
3977FNIEMOP_DEF(iemOp_andps_Vps_Wps)
3978{
3979 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
3980 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
3981}
3982
3983
3984/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
3985FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
3986{
3987 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
3988 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
3989}
3990
3991
3992/* Opcode 0xf3 0x0f 0x54 - invalid */
3993/* Opcode 0xf2 0x0f 0x54 - invalid */
3994
3995
3996/** Opcode 0x0f 0x55 - andnps Vps, Wps */
3997FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
3998{
3999 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4000 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
4001}
4002
4003
4004/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
4005FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
4006{
4007 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4008 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
4009}
4010
4011
4012/* Opcode 0xf3 0x0f 0x55 - invalid */
4013/* Opcode 0xf2 0x0f 0x55 - invalid */
4014
4015
4016/** Opcode 0x0f 0x56 - orps Vps, Wps */
4017FNIEMOP_DEF(iemOp_orps_Vps_Wps)
4018{
4019 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4020 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
4021}
4022
4023
4024/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
4025FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
4026{
4027 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4028 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
4029}
4030
4031
4032/* Opcode 0xf3 0x0f 0x56 - invalid */
4033/* Opcode 0xf2 0x0f 0x56 - invalid */
4034
4035
4036/** Opcode 0x0f 0x57 - xorps Vps, Wps */
4037FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
4038{
4039 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4040 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
4041}
4042
4043
4044/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
4045FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
4046{
4047 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4048 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
4049}
4050
4051
4052/* Opcode 0xf3 0x0f 0x57 - invalid */
4053/* Opcode 0xf2 0x0f 0x57 - invalid */
4054
4055/** Opcode 0x0f 0x58 - addps Vps, Wps */
4056FNIEMOP_DEF(iemOp_addps_Vps_Wps)
4057{
4058 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4059 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
4060}
4061
4062
4063/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
4064FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
4065{
4066 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4067 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
4068}
4069
4070
4071/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
4072FNIEMOP_DEF(iemOp_addss_Vss_Wss)
4073{
4074 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
4075 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
4076}
4077
4078
4079/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
4080FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
4081{
4082 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
4083 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
4084}
4085
4086
4087/** Opcode 0x0f 0x59 - mulps Vps, Wps */
4088FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
4089{
4090 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4091 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
4092}
4093
4094
4095/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
4096FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
4097{
4098 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4099 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
4100}
4101
4102
4103/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
4104FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
4105{
4106 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
4107 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
4108}
4109
4110
4111/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
4112FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
4113{
4114 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
4115 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
4116}
4117
4118
4119/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
4120FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
4121/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
4122FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
4123/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
4124FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
4125/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
4126FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);
4127
4128/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
4129FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
4130/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
4131FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
4132/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
4133FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
4134/* Opcode 0xf2 0x0f 0x5b - invalid */
4135
4136
4137/** Opcode 0x0f 0x5c - subps Vps, Wps */
4138FNIEMOP_DEF(iemOp_subps_Vps_Wps)
4139{
4140 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4141 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
4142}
4143
4144
4145/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
4146FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
4147{
4148 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4149 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
4150}
4151
4152
4153/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
4154FNIEMOP_DEF(iemOp_subss_Vss_Wss)
4155{
4156 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
4157 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
4158}
4159
4160
4161/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
4162FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
4163{
4164 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
4165 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
4166}
4167
4168
4169/** Opcode 0x0f 0x5d - minps Vps, Wps */
4170FNIEMOP_DEF(iemOp_minps_Vps_Wps)
4171{
4172 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4173 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
4174}
4175
4176
4177/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
4178FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
4179{
4180 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4181 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
4182}
4183
4184
4185/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
4186FNIEMOP_STUB(iemOp_minss_Vss_Wss);
4187/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
4188FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);
4189
4190
4191/** Opcode 0x0f 0x5e - divps Vps, Wps */
4192FNIEMOP_DEF(iemOp_divps_Vps_Wps)
4193{
4194 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4195 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
4196}
4197
4198
4199/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
4200FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
4201{
4202 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4203 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
4204}
4205
4206
4207/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
4208FNIEMOP_STUB(iemOp_divss_Vss_Wss);
4209/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
4210FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);
4211
4212
4213/** Opcode 0x0f 0x5f - maxps Vps, Wps */
4214FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
4215{
4216 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
4217 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
4218}
4219
4220
4221/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
4222FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
4223{
4224 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
4225 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
4226}
4227
4228
4229/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
4230FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
4231/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
4232FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
4233
4234
4235/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
4236FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
4237{
4238 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4239 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
4240}
4241
4242
4243/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
4244FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
4245{
4246 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4247 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
4248}
4249
4250
4251/* Opcode 0xf3 0x0f 0x60 - invalid */
4252
4253
4254/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
4255FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
4256{
4257 /** @todo AMD marks the MMX version as 3DNow!; Intel says it requires the MMX CPUID bit. */
4258 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4259 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
4260}
4261
4262
4263/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
4264FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
4265{
4266 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4267 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
4268}
4269
4270
4271/* Opcode 0xf3 0x0f 0x61 - invalid */
4272
4273
4274/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
4275FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
4276{
4277 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4278 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
4279}
4280
4281
4282/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
4283FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
4284{
4285 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4286 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
4287}
4288
4289
4290/* Opcode 0xf3 0x0f 0x62 - invalid */
4291
4292
4293
4294/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
4295FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
4296{
4297 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4298 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
4299}
4300
4301
4302/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
4303FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
4304{
4305 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4306 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
4307}
4308
4309
4310/* Opcode 0xf3 0x0f 0x63 - invalid */
4311
4312
4313/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
4314FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
4315{
4316 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4317 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
4318}
4319
4320
4321/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
4322FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
4323{
4324 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4325 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
4326}
4327
4328
4329/* Opcode 0xf3 0x0f 0x64 - invalid */
4330
4331
4332/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
4333FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
4334{
4335 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4336 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
4337}
4338
4339
4340/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
4341FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
4342{
4343 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4344 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
4345}
4346
4347
4348/* Opcode 0xf3 0x0f 0x65 - invalid */
4349
4350
4351/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
4352FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
4353{
4354 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4355 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
4356}
4357
4358
4359/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
4360FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
4361{
4362 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4363 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
4364}
4365
4366
4367/* Opcode 0xf3 0x0f 0x66 - invalid */
4368
4369
4370/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
4371FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
4372{
4373 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4374 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
4375}
4376
4377
4378/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
4379FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
4380{
4381 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4382 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
4383}
4384
4385
4386/* Opcode 0xf3 0x0f 0x67 - invalid */
4387
4388
4389/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
4390 * @note Intel and AMD both use Qd for the second parameter; however, they
4391 * both list it as an mmX/mem64 operand and Intel describes it as being
4392 * loaded as a qword, so it should be Qq, shouldn't it? */
4393FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
4394{
4395 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4396 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
4397}
4398
4399
4400/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
4401FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
4402{
4403 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4404 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
4405}
4406
4407
4408/* Opcode 0xf3 0x0f 0x68 - invalid */
4409
4410
4411/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
4412 * @note Intel and AMD both use Qd for the second parameter; however, they
4413 * both list it as an mmX/mem64 operand and Intel describes it as being
4414 * loaded as a qword, so it should be Qq, shouldn't it? */
4415FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
4416{
4417 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4418 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
4419}
4420
4421
4422/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
4423FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
4424{
4425 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4426 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
4428}
4429
4430
4431/* Opcode 0xf3 0x0f 0x69 - invalid */
4432
4433
4434/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
4435 * @note Intel and AMD both use Qd for the second parameter; however, they
4436 * both list it as an mmX/mem64 operand and Intel describes it as being
4437 * loaded as a qword, so it should be Qq, shouldn't it? */
4438FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
4439{
4440 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4441 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
4442}
4443
4444
4445/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
4446FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
4447{
4448 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4449 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
4450}
4451
4452
4453/* Opcode 0xf3 0x0f 0x6a - invalid */
4454
4455
4456/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
4457FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
4458{
4459 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4460 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
4461}
4462
4463
4464/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
4465FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
4466{
4467 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4468 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
4469}
4470
4471
4472/* Opcode 0xf3 0x0f 0x6b - invalid */
4473
4474
4475/* Opcode 0x0f 0x6c - invalid */
4476
4477
4478/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
4479FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
4480{
4481 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4482 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
4483}
4484
4485
4486/* Opcode 0xf3 0x0f 0x6c - invalid */
4487/* Opcode 0xf2 0x0f 0x6c - invalid */
4488
4489
4490/* Opcode 0x0f 0x6d - invalid */
4491
4492
4493/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
4494FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
4495{
4496 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
4497 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
4498}
4499
4500
4501/* Opcode 0xf3 0x0f 0x6d - invalid */
4502
4503
4504FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
4505{
4506 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4507 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4508 {
4509 /**
4510 * @opcode 0x6e
4511 * @opcodesub rex.w=1
4512 * @oppfx none
4513 * @opcpuid mmx
4514 * @opgroup og_mmx_datamove
4515 * @opxcpttype 5
4516 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
4517 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
4518 */
4519 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
4520 if (IEM_IS_MODRM_REG_MODE(bRm))
4521 {
4522 /* MMX, greg64 */
4523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4524 IEM_MC_BEGIN(0, 1);
4525 IEM_MC_LOCAL(uint64_t, u64Tmp);
4526
4527 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4528 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4529
4530 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4531 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4532 IEM_MC_FPU_TO_MMX_MODE();
4533
4534 IEM_MC_ADVANCE_RIP();
4535 IEM_MC_END();
4536 }
4537 else
4538 {
4539 /* MMX, [mem64] */
4540 IEM_MC_BEGIN(0, 2);
4541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4542 IEM_MC_LOCAL(uint64_t, u64Tmp);
4543
4544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4546 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4547 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4548
4549 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4550 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4551 IEM_MC_FPU_TO_MMX_MODE();
4552
4553 IEM_MC_ADVANCE_RIP();
4554 IEM_MC_END();
4555 }
4556 }
4557 else
4558 {
4559 /**
4560 * @opdone
4561 * @opcode 0x6e
4562 * @opcodesub rex.w=0
4563 * @oppfx none
4564 * @opcpuid mmx
4565 * @opgroup og_mmx_datamove
4566 * @opxcpttype 5
4567 * @opfunction iemOp_movd_q_Pd_Ey
4568 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4569 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4570 */
4571 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
4572 if (IEM_IS_MODRM_REG_MODE(bRm))
4573 {
4574 /* MMX, greg */
4575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4576 IEM_MC_BEGIN(0, 1);
4577 IEM_MC_LOCAL(uint64_t, u64Tmp);
4578
4579 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4580 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4581
4582 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4583 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4584 IEM_MC_FPU_TO_MMX_MODE();
4585
4586 IEM_MC_ADVANCE_RIP();
4587 IEM_MC_END();
4588 }
4589 else
4590 {
4591 /* MMX, [mem] */
4592 IEM_MC_BEGIN(0, 2);
4593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4594 IEM_MC_LOCAL(uint32_t, u32Tmp);
4595
4596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4598 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4599 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4600
4601 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4602 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
4603 IEM_MC_FPU_TO_MMX_MODE();
4604
4605 IEM_MC_ADVANCE_RIP();
4606 IEM_MC_END();
4607 }
4608 }
4609 return VINF_SUCCESS;
4610}
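
/*
 * Encoding sketch: 0F 6E /r decodes as MOVD mm,r/m32 without REX.W and as
 * MOVQ mm,r/m64 with REX.W=1; the 32-bit form is zero extended into the
 * 64-bit MMX register (see the IEM_MC_FETCH_GREG_U32_ZX_U64 and
 * IEM_MC_STORE_MREG_U32_ZX_U64 calls above).
 */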
4611
4612FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
4613{
4614 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4615 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4616 {
4617 /**
4618 * @opcode 0x6e
4619 * @opcodesub rex.w=1
4620 * @oppfx 0x66
4621 * @opcpuid sse2
4622 * @opgroup og_sse2_simdint_datamove
4623 * @opxcpttype 5
4624 * @optest 64-bit / op1=1 op2=2 -> op1=2
4625 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4626 */
4627 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
4628 if (IEM_IS_MODRM_REG_MODE(bRm))
4629 {
4630 /* XMM, greg64 */
4631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4632 IEM_MC_BEGIN(0, 1);
4633 IEM_MC_LOCAL(uint64_t, u64Tmp);
4634
4635 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4636 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4637
4638 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4639 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4640
4641 IEM_MC_ADVANCE_RIP();
4642 IEM_MC_END();
4643 }
4644 else
4645 {
4646 /* XMM, [mem64] */
4647 IEM_MC_BEGIN(0, 2);
4648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4649 IEM_MC_LOCAL(uint64_t, u64Tmp);
4650
4651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4653 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4654 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4655
4656 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4657 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4658
4659 IEM_MC_ADVANCE_RIP();
4660 IEM_MC_END();
4661 }
4662 }
4663 else
4664 {
4665 /**
4666 * @opdone
4667 * @opcode 0x6e
4668 * @opcodesub rex.w=0
4669 * @oppfx 0x66
4670 * @opcpuid sse2
4671 * @opgroup og_sse2_simdint_datamove
4672 * @opxcpttype 5
4673 * @opfunction iemOp_movd_q_Vy_Ey
4674 * @optest op1=1 op2=2 -> op1=2
4675 * @optest op1=0 op2=-42 -> op1=-42
4676 */
4677 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
4678 if (IEM_IS_MODRM_REG_MODE(bRm))
4679 {
4680 /* XMM, greg32 */
4681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4682 IEM_MC_BEGIN(0, 1);
4683 IEM_MC_LOCAL(uint32_t, u32Tmp);
4684
4685 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4686 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4687
4688 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4689 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4690
4691 IEM_MC_ADVANCE_RIP();
4692 IEM_MC_END();
4693 }
4694 else
4695 {
4696 /* XMM, [mem32] */
4697 IEM_MC_BEGIN(0, 2);
4698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4699 IEM_MC_LOCAL(uint32_t, u32Tmp);
4700
4701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4703 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4704 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4705
4706 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4707 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4708
4709 IEM_MC_ADVANCE_RIP();
4710 IEM_MC_END();
4711 }
4712 }
4713 return VINF_SUCCESS;
4714}
4715
4716/* Opcode 0xf3 0x0f 0x6e - invalid */
4717
4718
4719/**
4720 * @opcode 0x6f
4721 * @oppfx none
4722 * @opcpuid mmx
4723 * @opgroup og_mmx_datamove
4724 * @opxcpttype 5
4725 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
4726 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
4727 */
4728FNIEMOP_DEF(iemOp_movq_Pq_Qq)
4729{
4730 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4732 if (IEM_IS_MODRM_REG_MODE(bRm))
4733 {
4734 /*
4735 * Register, register.
4736 */
4737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4738 IEM_MC_BEGIN(0, 1);
4739 IEM_MC_LOCAL(uint64_t, u64Tmp);
4740
4741 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4742 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4743
4744 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
4745 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4746 IEM_MC_FPU_TO_MMX_MODE();
4747
4748 IEM_MC_ADVANCE_RIP();
4749 IEM_MC_END();
4750 }
4751 else
4752 {
4753 /*
4754 * Register, memory.
4755 */
4756 IEM_MC_BEGIN(0, 2);
4757 IEM_MC_LOCAL(uint64_t, u64Tmp);
4758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4759
4760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4762 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
4763 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
4764
4765 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4766 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
4767 IEM_MC_FPU_TO_MMX_MODE();
4768
4769 IEM_MC_ADVANCE_RIP();
4770 IEM_MC_END();
4771 }
4772 return VINF_SUCCESS;
4773}
4774
4775/**
4776 * @opcode 0x6f
4777 * @oppfx 0x66
4778 * @opcpuid sse2
4779 * @opgroup og_sse2_simdint_datamove
4780 * @opxcpttype 1
4781 * @optest op1=1 op2=2 -> op1=2
4782 * @optest op1=0 op2=-42 -> op1=-42
4783 */
4784FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
4785{
4786 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4787 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4788 if (IEM_IS_MODRM_REG_MODE(bRm))
4789 {
4790 /*
4791 * Register, register.
4792 */
4793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4794 IEM_MC_BEGIN(0, 0);
4795
4796 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4797 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4798
4799 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4800 IEM_GET_MODRM_RM(pVCpu, bRm));
4801 IEM_MC_ADVANCE_RIP();
4802 IEM_MC_END();
4803 }
4804 else
4805 {
4806 /*
4807 * Register, memory.
4808 */
4809 IEM_MC_BEGIN(0, 2);
4810 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4812
4813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4815 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4816 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4817
4818 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4819 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4820
4821 IEM_MC_ADVANCE_RIP();
4822 IEM_MC_END();
4823 }
4824 return VINF_SUCCESS;
4825}
4826
4827/**
4828 * @opcode 0x6f
4829 * @oppfx 0xf3
4830 * @opcpuid sse2
4831 * @opgroup og_sse2_simdint_datamove
4832 * @opxcpttype 4UA
4833 * @optest op1=1 op2=2 -> op1=2
4834 * @optest op1=0 op2=-42 -> op1=-42
4835 */
4836FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
4837{
4838 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4839 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4840 if (IEM_IS_MODRM_REG_MODE(bRm))
4841 {
4842 /*
4843 * Register, register.
4844 */
4845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4846 IEM_MC_BEGIN(0, 0);
4847 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4848 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4849 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
4850 IEM_GET_MODRM_RM(pVCpu, bRm));
4851 IEM_MC_ADVANCE_RIP();
4852 IEM_MC_END();
4853 }
4854 else
4855 {
4856 /*
4857 * Register, memory.
4858 */
4859 IEM_MC_BEGIN(0, 2);
4860 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4862
4863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4865 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4866 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4867 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4868 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4869
4870 IEM_MC_ADVANCE_RIP();
4871 IEM_MC_END();
4872 }
4873 return VINF_SUCCESS;
4874}
4875
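/*
 * Alignment note: the movdqa worker fetches with
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE and thus faults on a misaligned 16-byte
 * operand, whereas movdqu uses the unaligned IEM_MC_FETCH_MEM_U128. So for
 * the same guest pointer:
 *
 *     movdqa xmm0, [rax]   ; faults unless rax is 16-byte aligned
 *     movdqu xmm0, [rax]   ; any alignment is fine
 */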
4876
4877/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
4878FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
4879{
4880 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
4881 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4882 if (IEM_IS_MODRM_REG_MODE(bRm))
4883 {
4884 /*
4885 * Register, register.
4886 */
4887 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4889
4890 IEM_MC_BEGIN(3, 0);
4891 IEM_MC_ARG(uint64_t *, pDst, 0);
4892 IEM_MC_ARG(uint64_t const *, pSrc, 1);
4893 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4894 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
4895 IEM_MC_PREPARE_FPU_USAGE();
4896 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4897 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
4898 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
4899 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4900 IEM_MC_FPU_TO_MMX_MODE();
4901 IEM_MC_ADVANCE_RIP();
4902 IEM_MC_END();
4903 }
4904 else
4905 {
4906 /*
4907 * Register, memory.
4908 */
4909 IEM_MC_BEGIN(3, 2);
4910 IEM_MC_ARG(uint64_t *, pDst, 0);
4911 IEM_MC_LOCAL(uint64_t, uSrc);
4912 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
4913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4914
4915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4916 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4917 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4919 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
4920
4921 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4922 IEM_MC_PREPARE_FPU_USAGE();
4923 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
4924 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bEvilArg);
4925 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
4926 IEM_MC_FPU_TO_MMX_MODE();
4927
4928 IEM_MC_ADVANCE_RIP();
4929 IEM_MC_END();
4930 }
4931 return VINF_SUCCESS;
4932}
4933
4934
4935/**
4936 * Common worker for SSE2 instructions on the forms:
4937 * pshufd xmm1, xmm2/mem128, imm8
4938 * pshufhw xmm1, xmm2/mem128, imm8
4939 * pshuflw xmm1, xmm2/mem128, imm8
4940 *
4941 * Proper alignment of the 128-bit operand is enforced.
4942 * Exceptions type 4. SSE2 cpuid checks.
4943 */
4944FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
4945{
4946 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4947 if (IEM_IS_MODRM_REG_MODE(bRm))
4948 {
4949 /*
4950 * Register, register.
4951 */
4952 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4954
4955 IEM_MC_BEGIN(3, 0);
4956 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4957 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4958 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4959 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4960 IEM_MC_PREPARE_SSE_USAGE();
4961 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4962 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4963 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
4964 IEM_MC_ADVANCE_RIP();
4965 IEM_MC_END();
4966 }
4967 else
4968 {
4969 /*
4970 * Register, memory.
4971 */
4972 IEM_MC_BEGIN(3, 2);
4973 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4974 IEM_MC_LOCAL(RTUINT128U, uSrc);
4975 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
4976 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4977
4978 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4979 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4980 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4982 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
4983
4984 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4985 IEM_MC_PREPARE_SSE_USAGE();
4986 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4987 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bEvilArg);
4988
4989 IEM_MC_ADVANCE_RIP();
4990 IEM_MC_END();
4991 }
4992 return VINF_SUCCESS;
4993}
4994
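/*
 * Semantics sketch for the worker above: pshufd conceptually selects each
 * destination dword via a 2-bit field of the immediate,
 *
 *     puDst->au32[i] = puSrc->au32[(bEvil >> (i * 2)) & 3];   // i = 0..3
 *
 * (computed from a copy of the source, since source and destination may
 * overlap). pshuflw/pshufhw apply the same selection to the low/high four
 * words only and copy the other half through unchanged.
 */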
4995
4996/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
4997FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
4998{
4999 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5000 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
5001}
5002
5003
5004/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
5005FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
5006{
5007 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5008 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
5009}
5010
5011
5012/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
5013FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
5014{
5015 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5016 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
5017}
5018
5019
5020/**
5021 * Common worker for MMX instructions of the form:
5022 * psrlw mm, imm8
5023 * psraw mm, imm8
5024 * psllw mm, imm8
5025 * psrld mm, imm8
5026 * psrad mm, imm8
5027 * pslld mm, imm8
5028 * psrlq mm, imm8
5029 * psllq mm, imm8
5030 *
5031 */
5032FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
5033{
5034 if (IEM_IS_MODRM_REG_MODE(bRm))
5035 {
5036 /*
5037 * Register, immediate.
5038 */
5039 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5041
5042 IEM_MC_BEGIN(2, 0);
5043 IEM_MC_ARG(uint64_t *, pDst, 0);
5044 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
5045 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5046 IEM_MC_PREPARE_FPU_USAGE();
5047 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
5048 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
5049 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
5050 IEM_MC_FPU_TO_MMX_MODE();
5051 IEM_MC_ADVANCE_RIP();
5052 IEM_MC_END();
5053 }
5054 else
5055 {
5056 /*
5057 * Register, memory not supported.
5058 */
5059 /// @todo Caller already enforced register mode?!
5060 }
5061 return VINF_SUCCESS;
5062}
5063
5064
5065/**
5066 * Common worker for SSE2 instructions of the form:
5067 * psrlw xmm, imm8
5068 * psraw xmm, imm8
5069 * psllw xmm, imm8
5070 * psrld xmm, imm8
5071 * psrad xmm, imm8
5072 * pslld xmm, imm8
5073 * psrlq xmm, imm8
5074 * psllq xmm, imm8
5075 *
5076 */
5077FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, FNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
5078{
5079 if (IEM_IS_MODRM_REG_MODE(bRm))
5080 {
5081 /*
5082 * Register, immediate.
5083 */
5084 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5086
5087 IEM_MC_BEGIN(2, 0);
5088 IEM_MC_ARG(PRTUINT128U, pDst, 0);
5089 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
5090 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5091 IEM_MC_PREPARE_SSE_USAGE();
5092 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5093 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
5094 IEM_MC_ADVANCE_RIP();
5095 IEM_MC_END();
5096 }
5097 else
5098 {
5099 /*
5100 * Register, memory not supported.
5101 */
5102 /// @todo Caller already enforced register mode?!
5103 }
5104 return VINF_SUCCESS;
5105}
5106
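/*
 * Semantics sketch for the two shift-by-immediate workers above, using
 * psrlw as the example: each packed word is shifted right logically, and a
 * count above 15 zeroes every element. A minimal C model of the 64-bit
 * case (illustrative only, not the actual iemAImpl worker):
 *
 *     static uint64_t PsrlwModelU64(uint64_t uSrc, uint8_t bShift)
 *     {
 *         if (bShift > 15)
 *             return 0;
 *         uint64_t const fLaneMask = (UINT64_C(0xffff) >> bShift)
 *                                  * UINT64_C(0x0001000100010001);
 *         return (uSrc >> bShift) & fLaneMask; // drop bits shifted in from
 *                                              // the lane above
 *     }
 */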
5107
5108/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
5109FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
5110{
5111// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5112 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
5113}
5114
5115
5116/** Opcode 0x66 0x0f 0x71 11/2. */
5117FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
5118{
5119// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5120 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
5121}
5122
5123
5124/** Opcode 0x0f 0x71 11/4. */
5125FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
5126{
5127// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5128 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
5129}
5130
5131
5132/** Opcode 0x66 0x0f 0x71 11/4. */
5133FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
5134{
5135// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5136 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
5137}
5138
5139
5140/** Opcode 0x0f 0x71 11/6. */
5141FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
5142{
5143// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5144 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
5145}
5146
5147
5148/** Opcode 0x66 0x0f 0x71 11/6. */
5149FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
5150{
5151// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5152 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
5153}
5154
5155
5156/**
5157 * Group 12 jump table for register variant.
5158 */
5159IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
5160{
5161 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5162 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5163 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5164 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5165 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5166 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5167 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5168 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
5169};
5170AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
5171
5172
5173/** Opcode 0x0f 0x71. */
5174FNIEMOP_DEF(iemOp_Grp12)
5175{
5176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5177 if (IEM_IS_MODRM_REG_MODE(bRm))
5178 /* register, register */
5179 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5180 + pVCpu->iem.s.idxPrefix], bRm);
5181 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
5182}
5183
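/*
 * Dispatch sketch: for 66 0F 71 /2 ib (psrlw xmm,imm8) the ModR/M reg
 * field is 2 and idxPrefix is 1 (0x66 prefix), so the table lookup above
 * selects g_apfnGroup12RegReg[2 * 4 + 1] = iemOp_Grp12_psrlw_Ux_Ib. The
 * memory forms of groups 12/13/14 are all invalid and are routed to
 * iemOp_InvalidWithRMNeedImm8, which (as its name suggests) still consumes
 * the immediate byte; groups 13 and 14 below follow the same pattern.
 */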
5184
5185/** Opcode 0x0f 0x72 11/2. */
5186FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
5187{
5188// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5189 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
5190}
5191
5192
5193/** Opcode 0x66 0x0f 0x72 11/2. */
5194FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
5195{
5196// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5197 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
5198}
5199
5200
5201/** Opcode 0x0f 0x72 11/4. */
5202FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
5203{
5204// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5205 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
5206}
5207
5208
5209/** Opcode 0x66 0x0f 0x72 11/4. */
5210FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
5211{
5212// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5213 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
5214}
5215
5216
5217/** Opcode 0x0f 0x72 11/6. */
5218FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
5219{
5220// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5221 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
5222}
5223
5224/** Opcode 0x66 0x0f 0x72 11/6. */
5225FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
5226{
5227// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5228 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
5229}
5230
5231
5232/**
5233 * Group 13 jump table for register variant.
5234 */
5235IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
5236{
5237 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5238 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5239 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5240 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5241 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5242 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5243 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5244 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
5245};
5246AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
5247
5248/** Opcode 0x0f 0x72. */
5249FNIEMOP_DEF(iemOp_Grp13)
5250{
5251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5252 if (IEM_IS_MODRM_REG_MODE(bRm))
5253 /* register, register */
5254 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5255 + pVCpu->iem.s.idxPrefix], bRm);
5256 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
5257}
5258
5259
5260/** Opcode 0x0f 0x73 11/2. */
5261FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
5262{
5263// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5264 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
5265}
5266
5267
5268/** Opcode 0x66 0x0f 0x73 11/2. */
5269FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
5270{
5271// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5272 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
5273}
5274
5275
5276/** Opcode 0x66 0x0f 0x73 11/3. */
5277FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
5278{
5279// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5280 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
5281}
5282
5283
5284/** Opcode 0x0f 0x73 11/6. */
5285FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
5286{
5287// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
5288 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
5289}
5290
5291
5292/** Opcode 0x66 0x0f 0x73 11/6. */
5293FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
5294{
5295// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5296 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
5297}
5298
5299
5300/** Opcode 0x66 0x0f 0x73 11/7. */
5301FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
5302{
5303// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
5304 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
5305}
5306
5307/**
5308 * Group 14 jump table for register variant.
5309 */
5310IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
5311{
5312 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5313 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5314 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5315 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5316 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5317 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5318 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5319 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5320};
5321AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
5322
5323
5324/** Opcode 0x0f 0x73. */
5325FNIEMOP_DEF(iemOp_Grp14)
5326{
5327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5328 if (IEM_IS_MODRM_REG_MODE(bRm))
5329 /* register, register */
5330 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5331 + pVCpu->iem.s.idxPrefix], bRm);
5332 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
5333}
5334
5335
5336/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
5337FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
5338{
5339 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5340 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
5341}
5342
5343
5344/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
5345FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
5346{
5347 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5348 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
5349}
5350
5351
5352/* Opcode 0xf3 0x0f 0x74 - invalid */
5353/* Opcode 0xf2 0x0f 0x74 - invalid */
5354
5355
5356/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
5357FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
5358{
5359 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5360 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
5361}
5362
5363
5364/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
5365FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
5366{
5367 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5368 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
5369}
5370
5371
5372/* Opcode 0xf3 0x0f 0x75 - invalid */
5373/* Opcode 0xf2 0x0f 0x75 - invalid */
5374
5375
5376/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
5377FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
5378{
5379 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5380 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
5381}
5382
5383
5384/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
5385FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
5386{
5387 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5388 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
5389}
5390
5391
5392/* Opcode 0xf3 0x0f 0x76 - invalid */
5393/* Opcode 0xf2 0x0f 0x76 - invalid */
5394
5395
5396/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
5397FNIEMOP_DEF(iemOp_emms)
5398{
5399 IEMOP_MNEMONIC(emms, "emms");
5400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5401
5402 IEM_MC_BEGIN(0,0);
5403 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
5404 IEM_MC_MAYBE_RAISE_FPU_XCPT();
5405 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5406 IEM_MC_FPU_FROM_MMX_MODE();
5407 IEM_MC_ADVANCE_RIP();
5408 IEM_MC_END();
5409 return VINF_SUCCESS;
5410}
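
/*
 * Usage note: guests execute EMMS when leaving an MMX code block so that
 * subsequent x87 code sees an empty register stack again, e.g.:
 *
 *     movq  mm0, [mem]     ; FPU enters MMX mode, all tags valid
 *     ...                  ; MMX work
 *     emms                 ; tag word reset to all-empty
 *     fld   qword [mem]    ; plain x87 works normally again
 */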
5411
5412/* Opcode 0x66 0x0f 0x77 - invalid */
5413/* Opcode 0xf3 0x0f 0x77 - invalid */
5414/* Opcode 0xf2 0x0f 0x77 - invalid */
5415
5416/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
5417#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5418FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
5419{
5420 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
5421 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
5422 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
5423 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
5424
5425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5426 if (IEM_IS_MODRM_REG_MODE(bRm))
5427 {
5428 /*
5429 * Register, register.
5430 */
5431 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5432 if (enmEffOpSize == IEMMODE_64BIT)
5433 {
5434 IEM_MC_BEGIN(2, 0);
5435 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5436 IEM_MC_ARG(uint64_t, u64Enc, 1);
5437 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5438 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
5439 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg64, pu64Dst, u64Enc);
5440 IEM_MC_END();
5441 }
5442 else
5443 {
5444 IEM_MC_BEGIN(2, 0);
5445 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5446 IEM_MC_ARG(uint32_t, u32Enc, 1);
5447 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5448 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
5449 IEM_MC_CALL_CIMPL_2(iemCImpl_vmread_reg32, pu32Dst, u32Enc);
5450 IEM_MC_END();
5451 }
5452 }
5453 else
5454 {
5455 /*
5456 * Memory, register.
5457 */
5458 if (enmEffOpSize == IEMMODE_64BIT)
5459 {
5460 IEM_MC_BEGIN(3, 0);
5461 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5462 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5463 IEM_MC_ARG(uint64_t, u64Enc, 2);
5464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5465 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5466 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5467 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5468 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
5469 IEM_MC_END();
5470 }
5471 else
5472 {
5473 IEM_MC_BEGIN(3, 0);
5474 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5475 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5476 IEM_MC_ARG(uint32_t, u32Enc, 2);
5477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5478 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5479 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5480 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5481 IEM_MC_CALL_CIMPL_3(iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
5482 IEM_MC_END();
5483 }
5484 }
5485 return VINF_SUCCESS;
5486}
5487#else
5488FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
5489#endif
5490
5491/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
5492FNIEMOP_STUB(iemOp_AmdGrp17);
5493/* Opcode 0xf3 0x0f 0x78 - invalid */
5494/* Opcode 0xf2 0x0f 0x78 - invalid */
5495
5496/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
5497#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5498FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
5499{
5500 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
5501 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
5502 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
5503 IEMMODE const enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? IEMMODE_64BIT : IEMMODE_32BIT;
5504
5505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5506 if (IEM_IS_MODRM_REG_MODE(bRm))
5507 {
5508 /*
5509 * Register, register.
5510 */
5511 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5512 if (enmEffOpSize == IEMMODE_64BIT)
5513 {
5514 IEM_MC_BEGIN(2, 0);
5515 IEM_MC_ARG(uint64_t, u64Val, 0);
5516 IEM_MC_ARG(uint64_t, u64Enc, 1);
5517 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
5518 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5519 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u64Val, u64Enc);
5520 IEM_MC_END();
5521 }
5522 else
5523 {
5524 IEM_MC_BEGIN(2, 0);
5525 IEM_MC_ARG(uint32_t, u32Val, 0);
5526 IEM_MC_ARG(uint32_t, u32Enc, 1);
5527 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
5528 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5529 IEM_MC_CALL_CIMPL_2(iemCImpl_vmwrite_reg, u32Val, u32Enc);
5530 IEM_MC_END();
5531 }
5532 }
5533 else
5534 {
5535 /*
5536 * Register, memory.
5537 */
5538 if (enmEffOpSize == IEMMODE_64BIT)
5539 {
5540 IEM_MC_BEGIN(3, 0);
5541 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5542 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5543 IEM_MC_ARG(uint64_t, u64Enc, 2);
5544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5545 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5546 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5547 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5548 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
5549 IEM_MC_END();
5550 }
5551 else
5552 {
5553 IEM_MC_BEGIN(3, 0);
5554 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5555 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
5556 IEM_MC_ARG(uint32_t, u32Enc, 2);
5557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
5558 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
5559 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
5560 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5561 IEM_MC_CALL_CIMPL_3(iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
5562 IEM_MC_END();
5563 }
5564 }
5565 return VINF_SUCCESS;
5566}
5567#else
5568FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
5569#endif
5570/* Opcode 0x66 0x0f 0x79 - invalid */
5571/* Opcode 0xf3 0x0f 0x79 - invalid */
5572/* Opcode 0xf2 0x0f 0x79 - invalid */
5573
5574/* Opcode 0x0f 0x7a - invalid */
5575/* Opcode 0x66 0x0f 0x7a - invalid */
5576/* Opcode 0xf3 0x0f 0x7a - invalid */
5577/* Opcode 0xf2 0x0f 0x7a - invalid */
5578
5579/* Opcode 0x0f 0x7b - invalid */
5580/* Opcode 0x66 0x0f 0x7b - invalid */
5581/* Opcode 0xf3 0x0f 0x7b - invalid */
5582/* Opcode 0xf2 0x0f 0x7b - invalid */
5583
5584/* Opcode 0x0f 0x7c - invalid */
5585/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
5586FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
5587/* Opcode 0xf3 0x0f 0x7c - invalid */
5588/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
5589FNIEMOP_STUB(iemOp_haddps_Vps_Wps);
5590
5591/* Opcode 0x0f 0x7d - invalid */
5592/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
5593FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
5594/* Opcode 0xf3 0x0f 0x7d - invalid */
5595/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
5596FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
5597
5598
5599/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
5600FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
5601{
5602 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5603 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5604 {
5605 /**
5606 * @opcode 0x7e
5607 * @opcodesub rex.w=1
5608 * @oppfx none
5609 * @opcpuid mmx
5610 * @opgroup og_mmx_datamove
5611 * @opxcpttype 5
5612 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
5613 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
5614 */
5615 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5616 if (IEM_IS_MODRM_REG_MODE(bRm))
5617 {
5618 /* greg64, MMX */
5619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5620 IEM_MC_BEGIN(0, 1);
5621 IEM_MC_LOCAL(uint64_t, u64Tmp);
5622
5623 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5624 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5625
5626 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5627 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5628 IEM_MC_FPU_TO_MMX_MODE();
5629
5630 IEM_MC_ADVANCE_RIP();
5631 IEM_MC_END();
5632 }
5633 else
5634 {
5635 /* [mem64], MMX */
5636 IEM_MC_BEGIN(0, 2);
5637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5638 IEM_MC_LOCAL(uint64_t, u64Tmp);
5639
5640 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5642 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5643 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5644
5645 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5646 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5647 IEM_MC_FPU_TO_MMX_MODE();
5648
5649 IEM_MC_ADVANCE_RIP();
5650 IEM_MC_END();
5651 }
5652 }
5653 else
5654 {
5655 /**
5656 * @opdone
5657 * @opcode 0x7e
5658 * @opcodesub rex.w=0
5659 * @oppfx none
5660 * @opcpuid mmx
5661 * @opgroup og_mmx_datamove
5662 * @opxcpttype 5
5663 * @opfunction iemOp_movd_q_Ey_Pd
5664 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
5665 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
5666 */
5667 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX);
5668 if (IEM_IS_MODRM_REG_MODE(bRm))
5669 {
5670 /* greg32, MMX */
5671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5672 IEM_MC_BEGIN(0, 1);
5673 IEM_MC_LOCAL(uint32_t, u32Tmp);
5674
5675 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5676 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5677
5678 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
5679 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5680 IEM_MC_FPU_TO_MMX_MODE();
5681
5682 IEM_MC_ADVANCE_RIP();
5683 IEM_MC_END();
5684 }
5685 else
5686 {
5687 /* [mem32], MMX */
5688 IEM_MC_BEGIN(0, 2);
5689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5690 IEM_MC_LOCAL(uint32_t, u32Tmp);
5691
5692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5694 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5695 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5696
5697 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
5698 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5699 IEM_MC_FPU_TO_MMX_MODE();
5700
5701 IEM_MC_ADVANCE_RIP();
5702 IEM_MC_END();
5703 }
5704 }
5705 return VINF_SUCCESS;
5707}
5708
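/* A quick illustration of the REX.W split above (assembly sketch, register
   forms only; the memory forms store to a 64- or 32-bit location instead):
       movd eax, mm0        ; 0F 7E /r       - low 32 bits of MM0 -> EAX
       movq rax, mm0        ; REX.W 0F 7E /r - all 64 bits of MM0 -> RAX
   Both variants put the FPU into MMX mode, hence ftw=0xff in the @optest
   lines above. */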
5709
5710FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
5711{
5712 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5713 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5714 {
5715 /**
5716 * @opcode 0x7e
5717 * @opcodesub rex.w=1
5718 * @oppfx 0x66
5719 * @opcpuid sse2
5720 * @opgroup og_sse2_simdint_datamove
5721 * @opxcpttype 5
5722 * @optest 64-bit / op1=1 op2=2 -> op1=2
5723 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
5724 */
5725 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5726 if (IEM_IS_MODRM_REG_MODE(bRm))
5727 {
5728 /* greg64, XMM */
5729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5730 IEM_MC_BEGIN(0, 1);
5731 IEM_MC_LOCAL(uint64_t, u64Tmp);
5732
5733 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5734 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5735
5736 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5737 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
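            /* Note: no IEM_MC_FPU_TO_MMX_MODE() here - unlike the MMX form
               of this opcode above, the SSE form leaves x87/MMX state alone. */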
5738
5739 IEM_MC_ADVANCE_RIP();
5740 IEM_MC_END();
5741 }
5742 else
5743 {
5744 /* [mem64], XMM */
5745 IEM_MC_BEGIN(0, 2);
5746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5747 IEM_MC_LOCAL(uint64_t, u64Tmp);
5748
5749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5751 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5752 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5753
5754 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5755 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5756
5757 IEM_MC_ADVANCE_RIP();
5758 IEM_MC_END();
5759 }
5760 }
5761 else
5762 {
5763 /**
5764 * @opdone
5765 * @opcode 0x7e
5766 * @opcodesub rex.w=0
5767 * @oppfx 0x66
5768 * @opcpuid sse2
5769 * @opgroup og_sse2_simdint_datamove
5770 * @opxcpttype 5
5771 * @opfunction iemOp_movd_q_Ey_Vy
5772 * @optest op1=1 op2=2 -> op1=2
5773 * @optest op1=0 op2=-42 -> op1=-42
5774 */
5775 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX);
5776 if (IEM_IS_MODRM_REG_MODE(bRm))
5777 {
5778 /* greg32, XMM */
5779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5780 IEM_MC_BEGIN(0, 1);
5781 IEM_MC_LOCAL(uint32_t, u32Tmp);
5782
5783 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5784 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5785
5786 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5787 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5788
5789 IEM_MC_ADVANCE_RIP();
5790 IEM_MC_END();
5791 }
5792 else
5793 {
5794 /* [mem32], XMM */
5795 IEM_MC_BEGIN(0, 2);
5796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5797 IEM_MC_LOCAL(uint32_t, u32Tmp);
5798
5799 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5801 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5802 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5803
5804 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5805 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5806
5807 IEM_MC_ADVANCE_RIP();
5808 IEM_MC_END();
5809 }
5810 }
5811 return VINF_SUCCESS;
5813}
5814
5815/**
5816 * @opcode 0x7e
5817 * @oppfx 0xf3
5818 * @opcpuid sse2
5819 * @opgroup og_sse2_pcksclr_datamove
5820 * @opxcpttype none
5821 * @optest op1=1 op2=2 -> op1=2
5822 * @optest op1=0 op2=-42 -> op1=-42
5823 */
5824FNIEMOP_DEF(iemOp_movq_Vq_Wq)
5825{
5826 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5828 if (IEM_IS_MODRM_REG_MODE(bRm))
5829 {
5830 /*
5831 * Register, register.
5832 */
5833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5834 IEM_MC_BEGIN(0, 2);
5835 IEM_MC_LOCAL(uint64_t, uSrc);
5836
5837 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5838 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5839
5840 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
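        /* MOVQ zero-extends: the _ZX_U128 store clears bits 127:64 of the
           destination XMM register, as the instruction requires. */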
5841 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5842
5843 IEM_MC_ADVANCE_RIP();
5844 IEM_MC_END();
5845 }
5846 else
5847 {
5848 /*
5849 * Memory, register.
5850 */
5851 IEM_MC_BEGIN(0, 2);
5852 IEM_MC_LOCAL(uint64_t, uSrc);
5853 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5854
5855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5857 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5858 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5859
5860 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5861 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5862
5863 IEM_MC_ADVANCE_RIP();
5864 IEM_MC_END();
5865 }
5866 return VINF_SUCCESS;
5867}
5868
5869/* Opcode 0xf2 0x0f 0x7e - invalid */
5870
5871
5872/** Opcode 0x0f 0x7f - movq Qq, Pq */
5873FNIEMOP_DEF(iemOp_movq_Qq_Pq)
5874{
5875 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
5876 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5877 if (IEM_IS_MODRM_REG_MODE(bRm))
5878 {
5879 /*
5880 * Register, register.
5881 */
5882 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
5883 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
5884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5885 IEM_MC_BEGIN(0, 1);
5886 IEM_MC_LOCAL(uint64_t, u64Tmp);
5887 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5888 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5889 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5890 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
5891 IEM_MC_FPU_TO_MMX_MODE();
5892 IEM_MC_ADVANCE_RIP();
5893 IEM_MC_END();
5894 }
5895 else
5896 {
5897 /*
5898 * Memory, register.
5899 */
5900 IEM_MC_BEGIN(0, 2);
5901 IEM_MC_LOCAL(uint64_t, u64Tmp);
5902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5903
5904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5906 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
5907 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
5908
5909 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
5910 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5911 IEM_MC_FPU_TO_MMX_MODE();
5912
5913 IEM_MC_ADVANCE_RIP();
5914 IEM_MC_END();
5915 }
5916 return VINF_SUCCESS;
5917}
5918
5919/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
5920FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
5921{
5922 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5923 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5924 if (IEM_IS_MODRM_REG_MODE(bRm))
5925 {
5926 /*
5927 * Register, register.
5928 */
5929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5930 IEM_MC_BEGIN(0, 0);
5931 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5932 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5933 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
5934 IEM_GET_MODRM_REG(pVCpu, bRm));
5935 IEM_MC_ADVANCE_RIP();
5936 IEM_MC_END();
5937 }
5938 else
5939 {
5940 /*
5941 * Register, memory.
5942 */
5943 IEM_MC_BEGIN(0, 2);
5944 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5946
5947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5949 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5950 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5951
5952 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5953 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5954
5955 IEM_MC_ADVANCE_RIP();
5956 IEM_MC_END();
5957 }
5958 return VINF_SUCCESS;
5959}
5960
5961/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
5962FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
5963{
5964 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5965 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5966 if (IEM_IS_MODRM_REG_MODE(bRm))
5967 {
5968 /*
5969 * Register, register.
5970 */
5971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5972 IEM_MC_BEGIN(0, 0);
5973 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5974 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
5975 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
5976 IEM_GET_MODRM_REG(pVCpu, bRm));
5977 IEM_MC_ADVANCE_RIP();
5978 IEM_MC_END();
5979 }
5980 else
5981 {
5982 /*
5983 * Register, memory.
5984 */
5985 IEM_MC_BEGIN(0, 2);
5986 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5988
5989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5991 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
5992 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5993
5994 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5995 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5996
5997 IEM_MC_ADVANCE_RIP();
5998 IEM_MC_END();
5999 }
6000 return VINF_SUCCESS;
6001}
6002
6003/* Opcode 0xf2 0x0f 0x7f - invalid */
6004
6005
6006
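/*
 * 0x0f 0x80..0x8f - long form Jcc. All sixteen follow the same pattern:
 * read a signed 16- or 32-bit displacement according to the effective
 * operand size and either take the relative jump or fall through to the
 * next instruction. In 64-bit mode the operand size defaults to 64-bit
 * (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE), so the rel32 path is taken and the
 * 32-bit displacement is sign-extended when applied to RIP.
 */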
6007/** Opcode 0x0f 0x80. */
6008FNIEMOP_DEF(iemOp_jo_Jv)
6009{
6010 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
6011 IEMOP_HLP_MIN_386();
6012 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6013 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6014 {
6015 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6017
6018 IEM_MC_BEGIN(0, 0);
6019 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6020 IEM_MC_REL_JMP_S16(i16Imm);
6021 } IEM_MC_ELSE() {
6022 IEM_MC_ADVANCE_RIP();
6023 } IEM_MC_ENDIF();
6024 IEM_MC_END();
6025 }
6026 else
6027 {
6028 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6030
6031 IEM_MC_BEGIN(0, 0);
6032 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6033 IEM_MC_REL_JMP_S32(i32Imm);
6034 } IEM_MC_ELSE() {
6035 IEM_MC_ADVANCE_RIP();
6036 } IEM_MC_ENDIF();
6037 IEM_MC_END();
6038 }
6039 return VINF_SUCCESS;
6040}
6041
6042
6043/** Opcode 0x0f 0x81. */
6044FNIEMOP_DEF(iemOp_jno_Jv)
6045{
6046 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
6047 IEMOP_HLP_MIN_386();
6048 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6049 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6050 {
6051 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6053
6054 IEM_MC_BEGIN(0, 0);
6055 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6056 IEM_MC_ADVANCE_RIP();
6057 } IEM_MC_ELSE() {
6058 IEM_MC_REL_JMP_S16(i16Imm);
6059 } IEM_MC_ENDIF();
6060 IEM_MC_END();
6061 }
6062 else
6063 {
6064 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6066
6067 IEM_MC_BEGIN(0, 0);
6068 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6069 IEM_MC_ADVANCE_RIP();
6070 } IEM_MC_ELSE() {
6071 IEM_MC_REL_JMP_S32(i32Imm);
6072 } IEM_MC_ENDIF();
6073 IEM_MC_END();
6074 }
6075 return VINF_SUCCESS;
6076}
6077
6078
6079/** Opcode 0x0f 0x82. */
6080FNIEMOP_DEF(iemOp_jc_Jv)
6081{
6082 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
6083 IEMOP_HLP_MIN_386();
6084 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6085 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6086 {
6087 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6089
6090 IEM_MC_BEGIN(0, 0);
6091 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6092 IEM_MC_REL_JMP_S16(i16Imm);
6093 } IEM_MC_ELSE() {
6094 IEM_MC_ADVANCE_RIP();
6095 } IEM_MC_ENDIF();
6096 IEM_MC_END();
6097 }
6098 else
6099 {
6100 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6102
6103 IEM_MC_BEGIN(0, 0);
6104 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6105 IEM_MC_REL_JMP_S32(i32Imm);
6106 } IEM_MC_ELSE() {
6107 IEM_MC_ADVANCE_RIP();
6108 } IEM_MC_ENDIF();
6109 IEM_MC_END();
6110 }
6111 return VINF_SUCCESS;
6112}
6113
6114
6115/** Opcode 0x0f 0x83. */
6116FNIEMOP_DEF(iemOp_jnc_Jv)
6117{
6118 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
6119 IEMOP_HLP_MIN_386();
6120 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6121 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6122 {
6123 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6125
6126 IEM_MC_BEGIN(0, 0);
6127 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6128 IEM_MC_ADVANCE_RIP();
6129 } IEM_MC_ELSE() {
6130 IEM_MC_REL_JMP_S16(i16Imm);
6131 } IEM_MC_ENDIF();
6132 IEM_MC_END();
6133 }
6134 else
6135 {
6136 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6138
6139 IEM_MC_BEGIN(0, 0);
6140 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6141 IEM_MC_ADVANCE_RIP();
6142 } IEM_MC_ELSE() {
6143 IEM_MC_REL_JMP_S32(i32Imm);
6144 } IEM_MC_ENDIF();
6145 IEM_MC_END();
6146 }
6147 return VINF_SUCCESS;
6148}
6149
6150
6151/** Opcode 0x0f 0x84. */
6152FNIEMOP_DEF(iemOp_je_Jv)
6153{
6154 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
6155 IEMOP_HLP_MIN_386();
6156 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6157 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6158 {
6159 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6161
6162 IEM_MC_BEGIN(0, 0);
6163 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6164 IEM_MC_REL_JMP_S16(i16Imm);
6165 } IEM_MC_ELSE() {
6166 IEM_MC_ADVANCE_RIP();
6167 } IEM_MC_ENDIF();
6168 IEM_MC_END();
6169 }
6170 else
6171 {
6172 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6174
6175 IEM_MC_BEGIN(0, 0);
6176 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6177 IEM_MC_REL_JMP_S32(i32Imm);
6178 } IEM_MC_ELSE() {
6179 IEM_MC_ADVANCE_RIP();
6180 } IEM_MC_ENDIF();
6181 IEM_MC_END();
6182 }
6183 return VINF_SUCCESS;
6184}
6185
6186
6187/** Opcode 0x0f 0x85. */
6188FNIEMOP_DEF(iemOp_jne_Jv)
6189{
6190 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
6191 IEMOP_HLP_MIN_386();
6192 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6193 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6194 {
6195 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6197
6198 IEM_MC_BEGIN(0, 0);
6199 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6200 IEM_MC_ADVANCE_RIP();
6201 } IEM_MC_ELSE() {
6202 IEM_MC_REL_JMP_S16(i16Imm);
6203 } IEM_MC_ENDIF();
6204 IEM_MC_END();
6205 }
6206 else
6207 {
6208 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6210
6211 IEM_MC_BEGIN(0, 0);
6212 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6213 IEM_MC_ADVANCE_RIP();
6214 } IEM_MC_ELSE() {
6215 IEM_MC_REL_JMP_S32(i32Imm);
6216 } IEM_MC_ENDIF();
6217 IEM_MC_END();
6218 }
6219 return VINF_SUCCESS;
6220}
6221
6222
6223/** Opcode 0x0f 0x86. */
6224FNIEMOP_DEF(iemOp_jbe_Jv)
6225{
6226 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
6227 IEMOP_HLP_MIN_386();
6228 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6229 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6230 {
6231 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6233
6234 IEM_MC_BEGIN(0, 0);
6235 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6236 IEM_MC_REL_JMP_S16(i16Imm);
6237 } IEM_MC_ELSE() {
6238 IEM_MC_ADVANCE_RIP();
6239 } IEM_MC_ENDIF();
6240 IEM_MC_END();
6241 }
6242 else
6243 {
6244 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6246
6247 IEM_MC_BEGIN(0, 0);
6248 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6249 IEM_MC_REL_JMP_S32(i32Imm);
6250 } IEM_MC_ELSE() {
6251 IEM_MC_ADVANCE_RIP();
6252 } IEM_MC_ENDIF();
6253 IEM_MC_END();
6254 }
6255 return VINF_SUCCESS;
6256}
6257
6258
6259/** Opcode 0x0f 0x87. */
6260FNIEMOP_DEF(iemOp_jnbe_Jv)
6261{
6262 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
6263 IEMOP_HLP_MIN_386();
6264 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6265 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6266 {
6267 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6269
6270 IEM_MC_BEGIN(0, 0);
6271 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6272 IEM_MC_ADVANCE_RIP();
6273 } IEM_MC_ELSE() {
6274 IEM_MC_REL_JMP_S16(i16Imm);
6275 } IEM_MC_ENDIF();
6276 IEM_MC_END();
6277 }
6278 else
6279 {
6280 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6282
6283 IEM_MC_BEGIN(0, 0);
6284 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6285 IEM_MC_ADVANCE_RIP();
6286 } IEM_MC_ELSE() {
6287 IEM_MC_REL_JMP_S32(i32Imm);
6288 } IEM_MC_ENDIF();
6289 IEM_MC_END();
6290 }
6291 return VINF_SUCCESS;
6292}
6293
6294
6295/** Opcode 0x0f 0x88. */
6296FNIEMOP_DEF(iemOp_js_Jv)
6297{
6298 IEMOP_MNEMONIC(js_Jv, "js Jv");
6299 IEMOP_HLP_MIN_386();
6300 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6301 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6302 {
6303 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6305
6306 IEM_MC_BEGIN(0, 0);
6307 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6308 IEM_MC_REL_JMP_S16(i16Imm);
6309 } IEM_MC_ELSE() {
6310 IEM_MC_ADVANCE_RIP();
6311 } IEM_MC_ENDIF();
6312 IEM_MC_END();
6313 }
6314 else
6315 {
6316 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6318
6319 IEM_MC_BEGIN(0, 0);
6320 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6321 IEM_MC_REL_JMP_S32(i32Imm);
6322 } IEM_MC_ELSE() {
6323 IEM_MC_ADVANCE_RIP();
6324 } IEM_MC_ENDIF();
6325 IEM_MC_END();
6326 }
6327 return VINF_SUCCESS;
6328}
6329
6330
6331/** Opcode 0x0f 0x89. */
6332FNIEMOP_DEF(iemOp_jns_Jv)
6333{
6334 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
6335 IEMOP_HLP_MIN_386();
6336 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6337 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6338 {
6339 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6341
6342 IEM_MC_BEGIN(0, 0);
6343 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6344 IEM_MC_ADVANCE_RIP();
6345 } IEM_MC_ELSE() {
6346 IEM_MC_REL_JMP_S16(i16Imm);
6347 } IEM_MC_ENDIF();
6348 IEM_MC_END();
6349 }
6350 else
6351 {
6352 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6354
6355 IEM_MC_BEGIN(0, 0);
6356 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6357 IEM_MC_ADVANCE_RIP();
6358 } IEM_MC_ELSE() {
6359 IEM_MC_REL_JMP_S32(i32Imm);
6360 } IEM_MC_ENDIF();
6361 IEM_MC_END();
6362 }
6363 return VINF_SUCCESS;
6364}
6365
6366
6367/** Opcode 0x0f 0x8a. */
6368FNIEMOP_DEF(iemOp_jp_Jv)
6369{
6370 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
6371 IEMOP_HLP_MIN_386();
6372 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6373 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6374 {
6375 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6377
6378 IEM_MC_BEGIN(0, 0);
6379 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6380 IEM_MC_REL_JMP_S16(i16Imm);
6381 } IEM_MC_ELSE() {
6382 IEM_MC_ADVANCE_RIP();
6383 } IEM_MC_ENDIF();
6384 IEM_MC_END();
6385 }
6386 else
6387 {
6388 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6390
6391 IEM_MC_BEGIN(0, 0);
6392 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6393 IEM_MC_REL_JMP_S32(i32Imm);
6394 } IEM_MC_ELSE() {
6395 IEM_MC_ADVANCE_RIP();
6396 } IEM_MC_ENDIF();
6397 IEM_MC_END();
6398 }
6399 return VINF_SUCCESS;
6400}
6401
6402
6403/** Opcode 0x0f 0x8b. */
6404FNIEMOP_DEF(iemOp_jnp_Jv)
6405{
6406 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
6407 IEMOP_HLP_MIN_386();
6408 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6409 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6410 {
6411 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6413
6414 IEM_MC_BEGIN(0, 0);
6415 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6416 IEM_MC_ADVANCE_RIP();
6417 } IEM_MC_ELSE() {
6418 IEM_MC_REL_JMP_S16(i16Imm);
6419 } IEM_MC_ENDIF();
6420 IEM_MC_END();
6421 }
6422 else
6423 {
6424 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6426
6427 IEM_MC_BEGIN(0, 0);
6428 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
6429 IEM_MC_ADVANCE_RIP();
6430 } IEM_MC_ELSE() {
6431 IEM_MC_REL_JMP_S32(i32Imm);
6432 } IEM_MC_ENDIF();
6433 IEM_MC_END();
6434 }
6435 return VINF_SUCCESS;
6436}
6437
6438
6439/** Opcode 0x0f 0x8c. */
6440FNIEMOP_DEF(iemOp_jl_Jv)
6441{
6442 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
6443 IEMOP_HLP_MIN_386();
6444 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6445 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6446 {
6447 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6449
6450 IEM_MC_BEGIN(0, 0);
6451 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6452 IEM_MC_REL_JMP_S16(i16Imm);
6453 } IEM_MC_ELSE() {
6454 IEM_MC_ADVANCE_RIP();
6455 } IEM_MC_ENDIF();
6456 IEM_MC_END();
6457 }
6458 else
6459 {
6460 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6462
6463 IEM_MC_BEGIN(0, 0);
6464 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6465 IEM_MC_REL_JMP_S32(i32Imm);
6466 } IEM_MC_ELSE() {
6467 IEM_MC_ADVANCE_RIP();
6468 } IEM_MC_ENDIF();
6469 IEM_MC_END();
6470 }
6471 return VINF_SUCCESS;
6472}
6473
6474
6475/** Opcode 0x0f 0x8d. */
6476FNIEMOP_DEF(iemOp_jnl_Jv)
6477{
6478 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
6479 IEMOP_HLP_MIN_386();
6480 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6481 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6482 {
6483 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6485
6486 IEM_MC_BEGIN(0, 0);
6487 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6488 IEM_MC_ADVANCE_RIP();
6489 } IEM_MC_ELSE() {
6490 IEM_MC_REL_JMP_S16(i16Imm);
6491 } IEM_MC_ENDIF();
6492 IEM_MC_END();
6493 }
6494 else
6495 {
6496 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6498
6499 IEM_MC_BEGIN(0, 0);
6500 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
6501 IEM_MC_ADVANCE_RIP();
6502 } IEM_MC_ELSE() {
6503 IEM_MC_REL_JMP_S32(i32Imm);
6504 } IEM_MC_ENDIF();
6505 IEM_MC_END();
6506 }
6507 return VINF_SUCCESS;
6508}
6509
6510
6511/** Opcode 0x0f 0x8e. */
6512FNIEMOP_DEF(iemOp_jle_Jv)
6513{
6514 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
6515 IEMOP_HLP_MIN_386();
6516 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6517 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6518 {
6519 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6521
6522 IEM_MC_BEGIN(0, 0);
6523 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6524 IEM_MC_REL_JMP_S16(i16Imm);
6525 } IEM_MC_ELSE() {
6526 IEM_MC_ADVANCE_RIP();
6527 } IEM_MC_ENDIF();
6528 IEM_MC_END();
6529 }
6530 else
6531 {
6532 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6534
6535 IEM_MC_BEGIN(0, 0);
6536 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6537 IEM_MC_REL_JMP_S32(i32Imm);
6538 } IEM_MC_ELSE() {
6539 IEM_MC_ADVANCE_RIP();
6540 } IEM_MC_ENDIF();
6541 IEM_MC_END();
6542 }
6543 return VINF_SUCCESS;
6544}
6545
6546
6547/** Opcode 0x0f 0x8f. */
6548FNIEMOP_DEF(iemOp_jnle_Jv)
6549{
6550 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
6551 IEMOP_HLP_MIN_386();
6552 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6553 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
6554 {
6555 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
6556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6557
6558 IEM_MC_BEGIN(0, 0);
6559 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6560 IEM_MC_ADVANCE_RIP();
6561 } IEM_MC_ELSE() {
6562 IEM_MC_REL_JMP_S16(i16Imm);
6563 } IEM_MC_ENDIF();
6564 IEM_MC_END();
6565 }
6566 else
6567 {
6568 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
6569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6570
6571 IEM_MC_BEGIN(0, 0);
6572 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
6573 IEM_MC_ADVANCE_RIP();
6574 } IEM_MC_ELSE() {
6575 IEM_MC_REL_JMP_S32(i32Imm);
6576 } IEM_MC_ENDIF();
6577 IEM_MC_END();
6578 }
6579 return VINF_SUCCESS;
6580}
6581
6582
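/*
 * 0x0f 0x90..0x9f - SETcc. These use the same EFLAGS predicates as the
 * Jcc family above, but store 1 or 0 into an 8-bit register or memory
 * operand instead of branching.
 */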
6583/** Opcode 0x0f 0x90. */
6584FNIEMOP_DEF(iemOp_seto_Eb)
6585{
6586 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
6587 IEMOP_HLP_MIN_386();
6588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6589
6590 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6591 * any way. AMD says it's "unused", whatever that means. We're
6592 * ignoring it for now. */
6593 if (IEM_IS_MODRM_REG_MODE(bRm))
6594 {
6595 /* register target */
6596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6597 IEM_MC_BEGIN(0, 0);
6598 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6599 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6600 } IEM_MC_ELSE() {
6601 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6602 } IEM_MC_ENDIF();
6603 IEM_MC_ADVANCE_RIP();
6604 IEM_MC_END();
6605 }
6606 else
6607 {
6608 /* memory target */
6609 IEM_MC_BEGIN(0, 1);
6610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6613 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6614 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6615 } IEM_MC_ELSE() {
6616 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6617 } IEM_MC_ENDIF();
6618 IEM_MC_ADVANCE_RIP();
6619 IEM_MC_END();
6620 }
6621 return VINF_SUCCESS;
6622}
6623
6624
6625/** Opcode 0x0f 0x91. */
6626FNIEMOP_DEF(iemOp_setno_Eb)
6627{
6628 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
6629 IEMOP_HLP_MIN_386();
6630 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6631
6632 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6633 * any way. AMD says it's "unused", whatever that means. We're
6634 * ignoring it for now. */
6635 if (IEM_IS_MODRM_REG_MODE(bRm))
6636 {
6637 /* register target */
6638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6639 IEM_MC_BEGIN(0, 0);
6640 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6641 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6642 } IEM_MC_ELSE() {
6643 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6644 } IEM_MC_ENDIF();
6645 IEM_MC_ADVANCE_RIP();
6646 IEM_MC_END();
6647 }
6648 else
6649 {
6650 /* memory target */
6651 IEM_MC_BEGIN(0, 1);
6652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6655 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
6656 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6657 } IEM_MC_ELSE() {
6658 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6659 } IEM_MC_ENDIF();
6660 IEM_MC_ADVANCE_RIP();
6661 IEM_MC_END();
6662 }
6663 return VINF_SUCCESS;
6664}
6665
6666
6667/** Opcode 0x0f 0x92. */
6668FNIEMOP_DEF(iemOp_setc_Eb)
6669{
6670 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
6671 IEMOP_HLP_MIN_386();
6672 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6673
6674 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6675 * any way. AMD says it's "unused", whatever that means. We're
6676 * ignoring it for now. */
6677 if (IEM_IS_MODRM_REG_MODE(bRm))
6678 {
6679 /* register target */
6680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6681 IEM_MC_BEGIN(0, 0);
6682 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6683 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6684 } IEM_MC_ELSE() {
6685 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6686 } IEM_MC_ENDIF();
6687 IEM_MC_ADVANCE_RIP();
6688 IEM_MC_END();
6689 }
6690 else
6691 {
6692 /* memory target */
6693 IEM_MC_BEGIN(0, 1);
6694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6697 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6698 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6699 } IEM_MC_ELSE() {
6700 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6701 } IEM_MC_ENDIF();
6702 IEM_MC_ADVANCE_RIP();
6703 IEM_MC_END();
6704 }
6705 return VINF_SUCCESS;
6706}
6707
6708
6709/** Opcode 0x0f 0x93. */
6710FNIEMOP_DEF(iemOp_setnc_Eb)
6711{
6712 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
6713 IEMOP_HLP_MIN_386();
6714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6715
6716 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6717 * any way. AMD says it's "unused", whatever that means. We're
6718 * ignoring it for now. */
6719 if (IEM_IS_MODRM_REG_MODE(bRm))
6720 {
6721 /* register target */
6722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6723 IEM_MC_BEGIN(0, 0);
6724 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6725 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6726 } IEM_MC_ELSE() {
6727 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6728 } IEM_MC_ENDIF();
6729 IEM_MC_ADVANCE_RIP();
6730 IEM_MC_END();
6731 }
6732 else
6733 {
6734 /* memory target */
6735 IEM_MC_BEGIN(0, 1);
6736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6739 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6740 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6741 } IEM_MC_ELSE() {
6742 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6743 } IEM_MC_ENDIF();
6744 IEM_MC_ADVANCE_RIP();
6745 IEM_MC_END();
6746 }
6747 return VINF_SUCCESS;
6748}
6749
6750
6751/** Opcode 0x0f 0x94. */
6752FNIEMOP_DEF(iemOp_sete_Eb)
6753{
6754 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
6755 IEMOP_HLP_MIN_386();
6756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6757
6758 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6759 * any way. AMD says it's "unused", whatever that means. We're
6760 * ignoring it for now. */
6761 if (IEM_IS_MODRM_REG_MODE(bRm))
6762 {
6763 /* register target */
6764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6765 IEM_MC_BEGIN(0, 0);
6766 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6767 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6768 } IEM_MC_ELSE() {
6769 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6770 } IEM_MC_ENDIF();
6771 IEM_MC_ADVANCE_RIP();
6772 IEM_MC_END();
6773 }
6774 else
6775 {
6776 /* memory target */
6777 IEM_MC_BEGIN(0, 1);
6778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6781 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6782 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6783 } IEM_MC_ELSE() {
6784 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6785 } IEM_MC_ENDIF();
6786 IEM_MC_ADVANCE_RIP();
6787 IEM_MC_END();
6788 }
6789 return VINF_SUCCESS;
6790}
6791
6792
6793/** Opcode 0x0f 0x95. */
6794FNIEMOP_DEF(iemOp_setne_Eb)
6795{
6796 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
6797 IEMOP_HLP_MIN_386();
6798 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6799
6800 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6801 * any way. AMD says it's "unused", whatever that means. We're
6802 * ignoring it for now. */
6803 if (IEM_IS_MODRM_REG_MODE(bRm))
6804 {
6805 /* register target */
6806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6807 IEM_MC_BEGIN(0, 0);
6808 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6809 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6810 } IEM_MC_ELSE() {
6811 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6812 } IEM_MC_ENDIF();
6813 IEM_MC_ADVANCE_RIP();
6814 IEM_MC_END();
6815 }
6816 else
6817 {
6818 /* memory target */
6819 IEM_MC_BEGIN(0, 1);
6820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6823 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
6824 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6825 } IEM_MC_ELSE() {
6826 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6827 } IEM_MC_ENDIF();
6828 IEM_MC_ADVANCE_RIP();
6829 IEM_MC_END();
6830 }
6831 return VINF_SUCCESS;
6832}
6833
6834
6835/** Opcode 0x0f 0x96. */
6836FNIEMOP_DEF(iemOp_setbe_Eb)
6837{
6838 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
6839 IEMOP_HLP_MIN_386();
6840 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6841
6842 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6843 * any way. AMD says it's "unused", whatever that means. We're
6844 * ignoring it for now. */
6845 if (IEM_IS_MODRM_REG_MODE(bRm))
6846 {
6847 /* register target */
6848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6849 IEM_MC_BEGIN(0, 0);
6850 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6851 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6852 } IEM_MC_ELSE() {
6853 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6854 } IEM_MC_ENDIF();
6855 IEM_MC_ADVANCE_RIP();
6856 IEM_MC_END();
6857 }
6858 else
6859 {
6860 /* memory target */
6861 IEM_MC_BEGIN(0, 1);
6862 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6865 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6866 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6867 } IEM_MC_ELSE() {
6868 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6869 } IEM_MC_ENDIF();
6870 IEM_MC_ADVANCE_RIP();
6871 IEM_MC_END();
6872 }
6873 return VINF_SUCCESS;
6874}
6875
6876
6877/** Opcode 0x0f 0x97. */
6878FNIEMOP_DEF(iemOp_setnbe_Eb)
6879{
6880 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
6881 IEMOP_HLP_MIN_386();
6882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6883
6884 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6885 * any way. AMD says it's "unused", whatever that means. We're
6886 * ignoring it for now. */
6887 if (IEM_IS_MODRM_REG_MODE(bRm))
6888 {
6889 /* register target */
6890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6891 IEM_MC_BEGIN(0, 0);
6892 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6893 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6894 } IEM_MC_ELSE() {
6895 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6896 } IEM_MC_ENDIF();
6897 IEM_MC_ADVANCE_RIP();
6898 IEM_MC_END();
6899 }
6900 else
6901 {
6902 /* memory target */
6903 IEM_MC_BEGIN(0, 1);
6904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6907 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
6908 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6909 } IEM_MC_ELSE() {
6910 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6911 } IEM_MC_ENDIF();
6912 IEM_MC_ADVANCE_RIP();
6913 IEM_MC_END();
6914 }
6915 return VINF_SUCCESS;
6916}
6917
6918
6919/** Opcode 0x0f 0x98. */
6920FNIEMOP_DEF(iemOp_sets_Eb)
6921{
6922 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
6923 IEMOP_HLP_MIN_386();
6924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6925
6926 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6927 * any way. AMD says it's "unused", whatever that means. We're
6928 * ignoring it for now. */
6929 if (IEM_IS_MODRM_REG_MODE(bRm))
6930 {
6931 /* register target */
6932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6933 IEM_MC_BEGIN(0, 0);
6934 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6935 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6936 } IEM_MC_ELSE() {
6937 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6938 } IEM_MC_ENDIF();
6939 IEM_MC_ADVANCE_RIP();
6940 IEM_MC_END();
6941 }
6942 else
6943 {
6944 /* memory target */
6945 IEM_MC_BEGIN(0, 1);
6946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6949 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6950 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6951 } IEM_MC_ELSE() {
6952 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6953 } IEM_MC_ENDIF();
6954 IEM_MC_ADVANCE_RIP();
6955 IEM_MC_END();
6956 }
6957 return VINF_SUCCESS;
6958}
6959
6960
6961/** Opcode 0x0f 0x99. */
6962FNIEMOP_DEF(iemOp_setns_Eb)
6963{
6964 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
6965 IEMOP_HLP_MIN_386();
6966 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6967
6968 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
6969 * any way. AMD says it's "unused", whatever that means. We're
6970 * ignoring it for now. */
6971 if (IEM_IS_MODRM_REG_MODE(bRm))
6972 {
6973 /* register target */
6974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6975 IEM_MC_BEGIN(0, 0);
6976 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6977 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
6978 } IEM_MC_ELSE() {
6979 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
6980 } IEM_MC_ENDIF();
6981 IEM_MC_ADVANCE_RIP();
6982 IEM_MC_END();
6983 }
6984 else
6985 {
6986 /* memory target */
6987 IEM_MC_BEGIN(0, 1);
6988 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6991 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
6992 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6993 } IEM_MC_ELSE() {
6994 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
6995 } IEM_MC_ENDIF();
6996 IEM_MC_ADVANCE_RIP();
6997 IEM_MC_END();
6998 }
6999 return VINF_SUCCESS;
7000}
7001
7002
7003/** Opcode 0x0f 0x9a. */
7004FNIEMOP_DEF(iemOp_setp_Eb)
7005{
7006 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
7007 IEMOP_HLP_MIN_386();
7008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7009
7010 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7011 * any way. AMD says it's "unused", whatever that means. We're
7012 * ignoring it for now. */
7013 if (IEM_IS_MODRM_REG_MODE(bRm))
7014 {
7015 /* register target */
7016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7017 IEM_MC_BEGIN(0, 0);
7018 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7019 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7020 } IEM_MC_ELSE() {
7021 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7022 } IEM_MC_ENDIF();
7023 IEM_MC_ADVANCE_RIP();
7024 IEM_MC_END();
7025 }
7026 else
7027 {
7028 /* memory target */
7029 IEM_MC_BEGIN(0, 1);
7030 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7031 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7033 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7034 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7035 } IEM_MC_ELSE() {
7036 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7037 } IEM_MC_ENDIF();
7038 IEM_MC_ADVANCE_RIP();
7039 IEM_MC_END();
7040 }
7041 return VINF_SUCCESS;
7042}
7043
7044
7045/** Opcode 0x0f 0x9b. */
7046FNIEMOP_DEF(iemOp_setnp_Eb)
7047{
7048 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
7049 IEMOP_HLP_MIN_386();
7050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7051
7052 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7053 * any way. AMD says it's "unused", whatever that means. We're
7054 * ignoring it for now. */
7055 if (IEM_IS_MODRM_REG_MODE(bRm))
7056 {
7057 /* register target */
7058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7059 IEM_MC_BEGIN(0, 0);
7060 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7061 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7062 } IEM_MC_ELSE() {
7063 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7064 } IEM_MC_ENDIF();
7065 IEM_MC_ADVANCE_RIP();
7066 IEM_MC_END();
7067 }
7068 else
7069 {
7070 /* memory target */
7071 IEM_MC_BEGIN(0, 1);
7072 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7073 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7075 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
7076 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7077 } IEM_MC_ELSE() {
7078 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7079 } IEM_MC_ENDIF();
7080 IEM_MC_ADVANCE_RIP();
7081 IEM_MC_END();
7082 }
7083 return VINF_SUCCESS;
7084}
7085
7086
7087/** Opcode 0x0f 0x9c. */
7088FNIEMOP_DEF(iemOp_setl_Eb)
7089{
7090 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
7091 IEMOP_HLP_MIN_386();
7092 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7093
7094 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7095 * any way. AMD says it's "unused", whatever that means. We're
7096 * ignoring it for now. */
7097 if (IEM_IS_MODRM_REG_MODE(bRm))
7098 {
7099 /* register target */
7100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7101 IEM_MC_BEGIN(0, 0);
7102 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7103 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7104 } IEM_MC_ELSE() {
7105 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7106 } IEM_MC_ENDIF();
7107 IEM_MC_ADVANCE_RIP();
7108 IEM_MC_END();
7109 }
7110 else
7111 {
7112 /* memory target */
7113 IEM_MC_BEGIN(0, 1);
7114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7115 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7117 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7118 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7119 } IEM_MC_ELSE() {
7120 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7121 } IEM_MC_ENDIF();
7122 IEM_MC_ADVANCE_RIP();
7123 IEM_MC_END();
7124 }
7125 return VINF_SUCCESS;
7126}
7127
7128
7129/** Opcode 0x0f 0x9d. */
7130FNIEMOP_DEF(iemOp_setnl_Eb)
7131{
7132 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
7133 IEMOP_HLP_MIN_386();
7134 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7135
7136 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7137 * any way. AMD says it's "unused", whatever that means. We're
7138 * ignoring it for now. */
7139 if (IEM_IS_MODRM_REG_MODE(bRm))
7140 {
7141 /* register target */
7142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7143 IEM_MC_BEGIN(0, 0);
7144 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7145 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7146 } IEM_MC_ELSE() {
7147 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7148 } IEM_MC_ENDIF();
7149 IEM_MC_ADVANCE_RIP();
7150 IEM_MC_END();
7151 }
7152 else
7153 {
7154 /* memory target */
7155 IEM_MC_BEGIN(0, 1);
7156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7159 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
7160 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7161 } IEM_MC_ELSE() {
7162 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7163 } IEM_MC_ENDIF();
7164 IEM_MC_ADVANCE_RIP();
7165 IEM_MC_END();
7166 }
7167 return VINF_SUCCESS;
7168}
7169
7170
7171/** Opcode 0x0f 0x9e. */
7172FNIEMOP_DEF(iemOp_setle_Eb)
7173{
7174 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
7175 IEMOP_HLP_MIN_386();
7176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7177
7178 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7179 * any way. AMD says it's "unused", whatever that means. We're
7180 * ignoring it for now. */
7181 if (IEM_IS_MODRM_REG_MODE(bRm))
7182 {
7183 /* register target */
7184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7185 IEM_MC_BEGIN(0, 0);
7186 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7187 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7188 } IEM_MC_ELSE() {
7189 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7190 } IEM_MC_ENDIF();
7191 IEM_MC_ADVANCE_RIP();
7192 IEM_MC_END();
7193 }
7194 else
7195 {
7196 /* memory target */
7197 IEM_MC_BEGIN(0, 1);
7198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7201 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7202 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7203 } IEM_MC_ELSE() {
7204 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7205 } IEM_MC_ENDIF();
7206 IEM_MC_ADVANCE_RIP();
7207 IEM_MC_END();
7208 }
7209 return VINF_SUCCESS;
7210}
7211
7212
7213/** Opcode 0x0f 0x9f. */
7214FNIEMOP_DEF(iemOp_setnle_Eb)
7215{
7216 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
7217 IEMOP_HLP_MIN_386();
7218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7219
7220 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
7221 * any way. AMD says it's "unused", whatever that means. We're
7222 * ignoring it for now. */
7223 if (IEM_IS_MODRM_REG_MODE(bRm))
7224 {
7225 /* register target */
7226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7227 IEM_MC_BEGIN(0, 0);
7228 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7229 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
7230 } IEM_MC_ELSE() {
7231 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
7232 } IEM_MC_ENDIF();
7233 IEM_MC_ADVANCE_RIP();
7234 IEM_MC_END();
7235 }
7236 else
7237 {
7238 /* memory target */
7239 IEM_MC_BEGIN(0, 1);
7240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7243 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
7244 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7245 } IEM_MC_ELSE() {
7246 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
7247 } IEM_MC_ENDIF();
7248 IEM_MC_ADVANCE_RIP();
7249 IEM_MC_END();
7250 }
7251 return VINF_SUCCESS;
7252}
7253
7254
7255/**
7256 * Common 'push segment-register' helper.
7257 */
7258FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
7259{
7260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7261 Assert(iReg >= X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); /* Only FS and GS can be pushed in 64-bit mode. */
7262 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7263
7264 switch (pVCpu->iem.s.enmEffOpSize)
7265 {
7266 case IEMMODE_16BIT:
7267 IEM_MC_BEGIN(0, 1);
7268 IEM_MC_LOCAL(uint16_t, u16Value);
7269 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
7270 IEM_MC_PUSH_U16(u16Value);
7271 IEM_MC_ADVANCE_RIP();
7272 IEM_MC_END();
7273 break;
7274
7275 case IEMMODE_32BIT:
7276 IEM_MC_BEGIN(0, 1);
7277 IEM_MC_LOCAL(uint32_t, u32Value);
7278 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
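            /* Intel documents that recent CPUs perform a 16-bit write when
               pushing a segment register with 32-bit operand size, leaving
               the upper half of the stack slot untouched; the dedicated
               _SREG push presumably exists to reproduce that behaviour. */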
7279 IEM_MC_PUSH_U32_SREG(u32Value);
7280 IEM_MC_ADVANCE_RIP();
7281 IEM_MC_END();
7282 break;
7283
7284 case IEMMODE_64BIT:
7285 IEM_MC_BEGIN(0, 1);
7286 IEM_MC_LOCAL(uint64_t, u64Value);
7287 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
7288 IEM_MC_PUSH_U64(u64Value);
7289 IEM_MC_ADVANCE_RIP();
7290 IEM_MC_END();
7291 break;
7292 }
7293
7294 return VINF_SUCCESS;
7295}
7296
7297
7298/** Opcode 0x0f 0xa0. */
7299FNIEMOP_DEF(iemOp_push_fs)
7300{
7301 IEMOP_MNEMONIC(push_fs, "push fs");
7302 IEMOP_HLP_MIN_386();
7303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7304 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
7305}
7306
7307
7308/** Opcode 0x0f 0xa1. */
7309FNIEMOP_DEF(iemOp_pop_fs)
7310{
7311 IEMOP_MNEMONIC(pop_fs, "pop fs");
7312 IEMOP_HLP_MIN_386();
7313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7314 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
7315}
7316
7317
7318/** Opcode 0x0f 0xa2. */
7319FNIEMOP_DEF(iemOp_cpuid)
7320{
7321 IEMOP_MNEMONIC(cpuid, "cpuid");
7322 IEMOP_HLP_MIN_486(); /* not all 486s support CPUID. */
7323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7324 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
7325}
7326
7327
7328/**
7329 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
7330 * iemOp_bts_Ev_Gv.
7331 */
7332FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
7333{
7334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7335 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
7336
7337 if (IEM_IS_MODRM_REG_MODE(bRm))
7338 {
7339 /* register destination. */
7340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7341 switch (pVCpu->iem.s.enmEffOpSize)
7342 {
7343 case IEMMODE_16BIT:
7344 IEM_MC_BEGIN(3, 0);
7345 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7346 IEM_MC_ARG(uint16_t, u16Src, 1);
7347 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7348
7349 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7350 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
7351 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7352 IEM_MC_REF_EFLAGS(pEFlags);
7353 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7354
7355 IEM_MC_ADVANCE_RIP();
7356 IEM_MC_END();
7357 return VINF_SUCCESS;
7358
7359 case IEMMODE_32BIT:
7360 IEM_MC_BEGIN(3, 0);
7361 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7362 IEM_MC_ARG(uint32_t, u32Src, 1);
7363 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7364
7365 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7366 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
7367 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7368 IEM_MC_REF_EFLAGS(pEFlags);
7369 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7370
7371 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7372 IEM_MC_ADVANCE_RIP();
7373 IEM_MC_END();
7374 return VINF_SUCCESS;
7375
7376 case IEMMODE_64BIT:
7377 IEM_MC_BEGIN(3, 0);
7378 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7379 IEM_MC_ARG(uint64_t, u64Src, 1);
7380 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7381
7382 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7383 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
7384 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7385 IEM_MC_REF_EFLAGS(pEFlags);
7386 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7387
7388 IEM_MC_ADVANCE_RIP();
7389 IEM_MC_END();
7390 return VINF_SUCCESS;
7391
7392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7393 }
7394 }
7395 else
7396 {
7397 /* memory destination. */
7398
7399 uint32_t fAccess;
7400 if (pImpl->pfnLockedU16)
7401 fAccess = IEM_ACCESS_DATA_RW;
7402 else /* BT */
7403 fAccess = IEM_ACCESS_DATA_R;
7404
7405 /** @todo test negative bit offsets! */
7406 switch (pVCpu->iem.s.enmEffOpSize)
7407 {
7408 case IEMMODE_16BIT:
7409 IEM_MC_BEGIN(3, 2);
7410 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7411 IEM_MC_ARG(uint16_t, u16Src, 1);
7412 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7413 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7414 IEM_MC_LOCAL(int16_t, i16AddrAdj);
7415
7416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7417 if (pImpl->pfnLockedU16)
7418 IEMOP_HLP_DONE_DECODING();
7419 else
7420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7421 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
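                /* The bit offset in Gv is signed: the low 4 bits select the
                   bit within a 16-bit word, while the remaining bits adjust
                   the effective address in whole words, i.e.
                   GCPtrEffDst += ((int16_t)u16Src >> 4) * 2; the SAR(4)/SHL(1)
                   pair below computes exactly that. */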
7422 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
7423 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
7424 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
7425 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
7426 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
7427 IEM_MC_FETCH_EFLAGS(EFlags);
7428
7429 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7430 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7431 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
7432 else
7433 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
7434 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
7435
7436 IEM_MC_COMMIT_EFLAGS(EFlags);
7437 IEM_MC_ADVANCE_RIP();
7438 IEM_MC_END();
7439 return VINF_SUCCESS;
7440
7441 case IEMMODE_32BIT:
7442 IEM_MC_BEGIN(3, 2);
7443 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7444 IEM_MC_ARG(uint32_t, u32Src, 1);
7445 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7447 IEM_MC_LOCAL(int32_t, i32AddrAdj);
7448
7449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7450 if (pImpl->pfnLockedU16)
7451 IEMOP_HLP_DONE_DECODING();
7452 else
7453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7454 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
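                /* Same signed bit-offset split, dword granularity:
                   GCPtrEffDst += ((int32_t)u32Src >> 5) * 4; bit = u32Src & 31. */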
7455 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
7456 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
7457 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
7458 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
7459 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
7460 IEM_MC_FETCH_EFLAGS(EFlags);
7461
7462 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7463 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7464 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
7465 else
7466 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
7467 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
7468
7469 IEM_MC_COMMIT_EFLAGS(EFlags);
7470 IEM_MC_ADVANCE_RIP();
7471 IEM_MC_END();
7472 return VINF_SUCCESS;
7473
7474 case IEMMODE_64BIT:
7475 IEM_MC_BEGIN(3, 2);
7476 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7477 IEM_MC_ARG(uint64_t, u64Src, 1);
7478 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7480 IEM_MC_LOCAL(int64_t, i64AddrAdj);
7481
7482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7483 if (pImpl->pfnLockedU16)
7484 IEMOP_HLP_DONE_DECODING();
7485 else
7486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7487 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7488 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
7489 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
7490 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
7491 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
7492 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
7493 IEM_MC_FETCH_EFLAGS(EFlags);
7494
7495 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7496 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7497 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
7498 else
7499 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
7500 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
7501
7502 IEM_MC_COMMIT_EFLAGS(EFlags);
7503 IEM_MC_ADVANCE_RIP();
7504 IEM_MC_END();
7505 return VINF_SUCCESS;
7506
7507 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7508 }
7509 }
7510}
7511
7512
7513/** Opcode 0x0f 0xa3. */
7514FNIEMOP_DEF(iemOp_bt_Ev_Gv)
7515{
7516 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
7517 IEMOP_HLP_MIN_386();
7518 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
7519}
7520
7521
7522/**
7523 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
7524 */
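/* SHLD shifts Ev left, filling the vacated low bits from the top of Gv, while
   SHRD shifts Ev right, filling the vacated high bits from the bottom of Gv.
   AF is architecturally undefined for non-zero shift counts and OF for counts
   greater than one, hence the EFLAGS verification hint below. */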
7525FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
7526{
7527 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7528 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7529
7530 if (IEM_IS_MODRM_REG_MODE(bRm))
7531 {
7532 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7534
7535 switch (pVCpu->iem.s.enmEffOpSize)
7536 {
7537 case IEMMODE_16BIT:
7538 IEM_MC_BEGIN(4, 0);
7539 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7540 IEM_MC_ARG(uint16_t, u16Src, 1);
7541 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7542 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7543
7544 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7545 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7546 IEM_MC_REF_EFLAGS(pEFlags);
7547 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7548
7549 IEM_MC_ADVANCE_RIP();
7550 IEM_MC_END();
7551 return VINF_SUCCESS;
7552
7553 case IEMMODE_32BIT:
7554 IEM_MC_BEGIN(4, 0);
7555 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7556 IEM_MC_ARG(uint32_t, u32Src, 1);
7557 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7558 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7559
7560 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7561 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7562 IEM_MC_REF_EFLAGS(pEFlags);
7563 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7564
7565 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7566 IEM_MC_ADVANCE_RIP();
7567 IEM_MC_END();
7568 return VINF_SUCCESS;
7569
7570 case IEMMODE_64BIT:
7571 IEM_MC_BEGIN(4, 0);
7572 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7573 IEM_MC_ARG(uint64_t, u64Src, 1);
7574 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
7575 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7576
7577 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7578 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7579 IEM_MC_REF_EFLAGS(pEFlags);
7580 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7581
7582 IEM_MC_ADVANCE_RIP();
7583 IEM_MC_END();
7584 return VINF_SUCCESS;
7585
7586 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7587 }
7588 }
7589 else
7590 {
7591 switch (pVCpu->iem.s.enmEffOpSize)
7592 {
7593 case IEMMODE_16BIT:
7594 IEM_MC_BEGIN(4, 2);
7595 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7596 IEM_MC_ARG(uint16_t, u16Src, 1);
7597 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7598 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7600
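/* Note the trailing '1' passed to the effective address calculation below: it
   tells the helper that one immediate byte (the shift count) still follows the
   ModR/M encoding, which presumably matters for RIP-relative addressing in
   64-bit mode. */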
7601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7602 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7603 IEM_MC_ASSIGN(cShiftArg, cShift);
7604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7605 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7606 IEM_MC_FETCH_EFLAGS(EFlags);
7607 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7608 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7609
7610 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7611 IEM_MC_COMMIT_EFLAGS(EFlags);
7612 IEM_MC_ADVANCE_RIP();
7613 IEM_MC_END();
7614 return VINF_SUCCESS;
7615
7616 case IEMMODE_32BIT:
7617 IEM_MC_BEGIN(4, 2);
7618 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7619 IEM_MC_ARG(uint32_t, u32Src, 1);
7620 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7621 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7623
7624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7625 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7626 IEM_MC_ASSIGN(cShiftArg, cShift);
7627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7628 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7629 IEM_MC_FETCH_EFLAGS(EFlags);
7630 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7631 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7632
7633 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7634 IEM_MC_COMMIT_EFLAGS(EFlags);
7635 IEM_MC_ADVANCE_RIP();
7636 IEM_MC_END();
7637 return VINF_SUCCESS;
7638
7639 case IEMMODE_64BIT:
7640 IEM_MC_BEGIN(4, 2);
7641 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7642 IEM_MC_ARG(uint64_t, u64Src, 1);
7643 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7644 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7646
7647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7648 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7649 IEM_MC_ASSIGN(cShiftArg, cShift);
7650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7651 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7652 IEM_MC_FETCH_EFLAGS(EFlags);
7653 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7654 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7655
7656 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7657 IEM_MC_COMMIT_EFLAGS(EFlags);
7658 IEM_MC_ADVANCE_RIP();
7659 IEM_MC_END();
7660 return VINF_SUCCESS;
7661
7662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7663 }
7664 }
7665}
7666
7667
7668/**
7669 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
7670 */
7671FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
7672{
7673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7674 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
7675
7676 if (IEM_IS_MODRM_REG_MODE(bRm))
7677 {
7678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7679
7680 switch (pVCpu->iem.s.enmEffOpSize)
7681 {
7682 case IEMMODE_16BIT:
7683 IEM_MC_BEGIN(4, 0);
7684 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7685 IEM_MC_ARG(uint16_t, u16Src, 1);
7686 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7687 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7688
7689 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7690 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7691 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7692 IEM_MC_REF_EFLAGS(pEFlags);
7693 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7694
7695 IEM_MC_ADVANCE_RIP();
7696 IEM_MC_END();
7697 return VINF_SUCCESS;
7698
7699 case IEMMODE_32BIT:
7700 IEM_MC_BEGIN(4, 0);
7701 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7702 IEM_MC_ARG(uint32_t, u32Src, 1);
7703 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7704 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7705
7706 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7707 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7708 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7709 IEM_MC_REF_EFLAGS(pEFlags);
7710 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7711
7712 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7713 IEM_MC_ADVANCE_RIP();
7714 IEM_MC_END();
7715 return VINF_SUCCESS;
7716
7717 case IEMMODE_64BIT:
7718 IEM_MC_BEGIN(4, 0);
7719 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7720 IEM_MC_ARG(uint64_t, u64Src, 1);
7721 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7722 IEM_MC_ARG(uint32_t *, pEFlags, 3);
7723
7724 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7725 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7726 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7727 IEM_MC_REF_EFLAGS(pEFlags);
7728 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7729
7730 IEM_MC_ADVANCE_RIP();
7731 IEM_MC_END();
7732 return VINF_SUCCESS;
7733
7734 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7735 }
7736 }
7737 else
7738 {
7739 switch (pVCpu->iem.s.enmEffOpSize)
7740 {
7741 case IEMMODE_16BIT:
7742 IEM_MC_BEGIN(4, 2);
7743 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7744 IEM_MC_ARG(uint16_t, u16Src, 1);
7745 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7746 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7748
7749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7751 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7752 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7753 IEM_MC_FETCH_EFLAGS(EFlags);
7754 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7755 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
7756
7757 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7758 IEM_MC_COMMIT_EFLAGS(EFlags);
7759 IEM_MC_ADVANCE_RIP();
7760 IEM_MC_END();
7761 return VINF_SUCCESS;
7762
7763 case IEMMODE_32BIT:
7764 IEM_MC_BEGIN(4, 2);
7765 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7766 IEM_MC_ARG(uint32_t, u32Src, 1);
7767 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7768 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7769 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7770
7771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7773 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7774 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7775 IEM_MC_FETCH_EFLAGS(EFlags);
7776 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7777 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
7778
7779 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7780 IEM_MC_COMMIT_EFLAGS(EFlags);
7781 IEM_MC_ADVANCE_RIP();
7782 IEM_MC_END();
7783 return VINF_SUCCESS;
7784
7785 case IEMMODE_64BIT:
7786 IEM_MC_BEGIN(4, 2);
7787 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7788 IEM_MC_ARG(uint64_t, u64Src, 1);
7789 IEM_MC_ARG(uint8_t, cShiftArg, 2);
7790 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
7791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7792
7793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7795 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
7796 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7797 IEM_MC_FETCH_EFLAGS(EFlags);
7798 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
7799 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
7800
7801 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7802 IEM_MC_COMMIT_EFLAGS(EFlags);
7803 IEM_MC_ADVANCE_RIP();
7804 IEM_MC_END();
7805 return VINF_SUCCESS;
7806
7807 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7808 }
7809 }
7810}
7811
7812
7813
7814/** Opcode 0x0f 0xa4. */
7815FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
7816{
7817 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
7818 IEMOP_HLP_MIN_386();
7819 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
7820}
7821
7822
7823/** Opcode 0x0f 0xa5. */
7824FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
7825{
7826 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
7827 IEMOP_HLP_MIN_386();
7828 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
7829}
7830
7831
7832/** Opcode 0x0f 0xa8. */
7833FNIEMOP_DEF(iemOp_push_gs)
7834{
7835 IEMOP_MNEMONIC(push_gs, "push gs");
7836 IEMOP_HLP_MIN_386();
7837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7838 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
7839}
7840
7841
7842/** Opcode 0x0f 0xa9. */
7843FNIEMOP_DEF(iemOp_pop_gs)
7844{
7845 IEMOP_MNEMONIC(pop_gs, "pop gs");
7846 IEMOP_HLP_MIN_386();
7847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7848 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
7849}
7850
7851
7852/** Opcode 0x0f 0xaa. */
7853FNIEMOP_DEF(iemOp_rsm)
7854{
7855 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
7856 IEMOP_HLP_MIN_386(); /* 386SL and later. */
7857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7858 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rsm);
7859}
7860
7861
7862
7863/** Opcode 0x0f 0xab. */
7864FNIEMOP_DEF(iemOp_bts_Ev_Gv)
7865{
7866 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
7867 IEMOP_HLP_MIN_386();
7868 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
7869}
7870
7871
7872/** Opcode 0x0f 0xac. */
7873FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
7874{
7875 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
7876 IEMOP_HLP_MIN_386();
7877 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
7878}
7879
7880
7881/** Opcode 0x0f 0xad. */
7882FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
7883{
7884 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
7885 IEMOP_HLP_MIN_386();
7886 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
7887}
7888
7889
7890/** Opcode 0x0f 0xae mem/0. */
7891FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
7892{
7893 IEMOP_MNEMONIC(fxsave, "fxsave m512");
7894 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
7895 return IEMOP_RAISE_INVALID_OPCODE();
7896
7897 IEM_MC_BEGIN(3, 1);
7898 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7899 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7900 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7903 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7904 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7905 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
7906 IEM_MC_END();
7907 return VINF_SUCCESS;
7908}
7909
7910
7911/** Opcode 0x0f 0xae mem/1. */
7912FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
7913{
7914 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
7915 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
7916 return IEMOP_RAISE_INVALID_OPCODE();
7917
7918 IEM_MC_BEGIN(3, 1);
7919 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7920 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7921 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
7922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7924 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7925 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7926 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
7927 IEM_MC_END();
7928 return VINF_SUCCESS;
7929}
7930
7931
7932/**
7933 * @opmaps grp15
7934 * @opcode !11/2
7935 * @oppfx none
7936 * @opcpuid sse
7937 * @opgroup og_sse_mxcsrsm
7938 * @opxcpttype 5
7939 * @optest op1=0 -> mxcsr=0
7940 * @optest op1=0x2083 -> mxcsr=0x2083
7941 * @optest op1=0xfffffffe -> value.xcpt=0xd
7942 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
7943 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
7944 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
7945 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
7946 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
7947 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
7948 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
7949 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
7950 */
7951FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
7952{
7953 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7954 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7955 return IEMOP_RAISE_INVALID_OPCODE();
7956
7957 IEM_MC_BEGIN(2, 0);
7958 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7959 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7960 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7962 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7963 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7964 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
7965 IEM_MC_END();
7966 return VINF_SUCCESS;
7967}
7968
7969
7970/**
7971 * @opmaps grp15
7972 * @opcode !11/3
7973 * @oppfx none
7974 * @opcpuid sse
7975 * @opgroup og_sse_mxcsrsm
7976 * @opxcpttype 5
7977 * @optest mxcsr=0 -> op1=0
7978 * @optest mxcsr=0x2083 -> op1=0x2083
7979 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
7980 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
7981 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
7982 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
7983 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
7984 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
7985 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
7986 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
7987 */
7988FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
7989{
7990 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7991 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
7992 return IEMOP_RAISE_INVALID_OPCODE();
7993
7994 IEM_MC_BEGIN(2, 0);
7995 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7996 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
7997 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
7998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7999 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
8000 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8001 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
8002 IEM_MC_END();
8003 return VINF_SUCCESS;
8004}
8005
8006
8007/**
8008 * @opmaps grp15
8009 * @opcode !11/4
8010 * @oppfx none
8011 * @opcpuid xsave
8012 * @opgroup og_system
8013 * @opxcpttype none
8014 */
8015FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
8016{
8017 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
8018 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
8019 return IEMOP_RAISE_INVALID_OPCODE();
8020
8021 IEM_MC_BEGIN(3, 0);
8022 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8023 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8024 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
8025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8027 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8028 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8029 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
8030 IEM_MC_END();
8031 return VINF_SUCCESS;
8032}
8033
8034
8035/**
8036 * @opmaps grp15
8037 * @opcode !11/5
8038 * @oppfx none
8039 * @opcpuid xsave
8040 * @opgroup og_system
8041 * @opxcpttype none
8042 */
8043FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
8044{
8045 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
8046 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
8047 return IEMOP_RAISE_INVALID_OPCODE();
8048
8049 IEM_MC_BEGIN(3, 0);
8050 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8051 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8052 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
8053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8055 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
8056 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8057 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
8058 IEM_MC_END();
8059 return VINF_SUCCESS;
8060}
8061
8062/** Opcode 0x0f 0xae mem/6. */
8063FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
8064
8065/**
8066 * @opmaps grp15
8067 * @opcode !11/7
8068 * @oppfx none
8069 * @opcpuid clfsh
8070 * @opgroup og_cachectl
8071 * @optest op1=1 ->
8072 */
8073FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
8074{
8075 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8076 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
8077 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
8078
8079 IEM_MC_BEGIN(2, 0);
8080 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8081 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8084 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8085 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
8086 IEM_MC_END();
8087 return VINF_SUCCESS;
8088}
8089
8090/**
8091 * @opmaps grp15
8092 * @opcode !11/7
8093 * @oppfx 0x66
8094 * @opcpuid clflushopt
8095 * @opgroup og_cachectl
8096 * @optest op1=1 ->
8097 */
8098FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
8099{
8100 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
8101 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
8102 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
8103
8104 IEM_MC_BEGIN(2, 0);
8105 IEM_MC_ARG(uint8_t, iEffSeg, 0);
8106 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
8107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8109 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
8110 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
8111 IEM_MC_END();
8112 return VINF_SUCCESS;
8113}
8114
8115
8116/** Opcode 0x0f 0xae 11b/5. */
8117FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
8118{
8119 RT_NOREF_PV(bRm);
8120 IEMOP_MNEMONIC(lfence, "lfence");
8121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8122 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8123 return IEMOP_RAISE_INVALID_OPCODE();
8124
8125 IEM_MC_BEGIN(0, 0);
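/* Hosts without SSE2 cannot execute a real LFENCE, so a substitute memory
   fence worker is called there instead; on ARM64 hosts the check is compiled
   out and iemAImpl_lfence is always used. MFENCE and SFENCE below follow the
   same pattern. */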
8126#ifndef RT_ARCH_ARM64
8127 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
8128#endif
8129 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
8130#ifndef RT_ARCH_ARM64
8131 else
8132 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
8133#endif
8134 IEM_MC_ADVANCE_RIP();
8135 IEM_MC_END();
8136 return VINF_SUCCESS;
8137}
8138
8139
8140/** Opcode 0x0f 0xae 11b/6. */
8141FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
8142{
8143 RT_NOREF_PV(bRm);
8144 IEMOP_MNEMONIC(mfence, "mfence");
8145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8146 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8147 return IEMOP_RAISE_INVALID_OPCODE();
8148
8149 IEM_MC_BEGIN(0, 0);
8150#ifndef RT_ARCH_ARM64
8151 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
8152#endif
8153 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
8154#ifndef RT_ARCH_ARM64
8155 else
8156 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
8157#endif
8158 IEM_MC_ADVANCE_RIP();
8159 IEM_MC_END();
8160 return VINF_SUCCESS;
8161}
8162
8163
8164/** Opcode 0x0f 0xae 11b/7. */
8165FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
8166{
8167 RT_NOREF_PV(bRm);
8168 IEMOP_MNEMONIC(sfence, "sfence");
8169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8170 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
8171 return IEMOP_RAISE_INVALID_OPCODE();
8172
8173 IEM_MC_BEGIN(0, 0);
8174#ifndef RT_ARCH_ARM64
8175 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
8176#endif
8177 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
8178#ifndef RT_ARCH_ARM64
8179 else
8180 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
8181#endif
8182 IEM_MC_ADVANCE_RIP();
8183 IEM_MC_END();
8184 return VINF_SUCCESS;
8185}
8186
8187
8188/** Opcode 0xf3 0x0f 0xae 11b/0. */
8189FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
8190{
8191 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
8192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8193 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
8194 {
8195 IEM_MC_BEGIN(1, 0);
8196 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8197 IEM_MC_ARG(uint64_t, u64Dst, 0);
8198 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
8199 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
8200 IEM_MC_ADVANCE_RIP();
8201 IEM_MC_END();
8202 }
8203 else
8204 {
8205 IEM_MC_BEGIN(1, 0);
8206 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8207 IEM_MC_ARG(uint32_t, u32Dst, 0);
8208 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
8209 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
8210 IEM_MC_ADVANCE_RIP();
8211 IEM_MC_END();
8212 }
8213 return VINF_SUCCESS;
8214}
8215
8216
8217/** Opcode 0xf3 0x0f 0xae 11b/1. */
8218FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
8219{
8220 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
8221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8222 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
8223 {
8224 IEM_MC_BEGIN(1, 0);
8225 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8226 IEM_MC_ARG(uint64_t, u64Dst, 0);
8227 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
8228 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
8229 IEM_MC_ADVANCE_RIP();
8230 IEM_MC_END();
8231 }
8232 else
8233 {
8234 IEM_MC_BEGIN(1, 0);
8235 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8236 IEM_MC_ARG(uint32_t, u32Dst, 0);
8237 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
8238 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
8239 IEM_MC_ADVANCE_RIP();
8240 IEM_MC_END();
8241 }
8242 return VINF_SUCCESS;
8243}
8244
8245
8246/** Opcode 0xf3 0x0f 0xae 11b/2. */
8247FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
8248{
8249 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
8250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8251 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
8252 {
8253 IEM_MC_BEGIN(1, 0);
8254 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8255 IEM_MC_ARG(uint64_t, u64Dst, 0);
8256 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8257 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
8258 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
8259 IEM_MC_ADVANCE_RIP();
8260 IEM_MC_END();
8261 }
8262 else
8263 {
8264 IEM_MC_BEGIN(1, 0);
8265 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8266 IEM_MC_ARG(uint32_t, u32Dst, 0);
8267 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8268 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
8269 IEM_MC_ADVANCE_RIP();
8270 IEM_MC_END();
8271 }
8272 return VINF_SUCCESS;
8273}
8274
8275
8276/** Opcode 0xf3 0x0f 0xae 11b/3. */
8277FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
8278{
8279 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
8280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8281 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
8282 {
8283 IEM_MC_BEGIN(1, 0);
8284 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8285 IEM_MC_ARG(uint64_t, u64Dst, 0);
8286 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8287 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
8288 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
8289 IEM_MC_ADVANCE_RIP();
8290 IEM_MC_END();
8291 }
8292 else
8293 {
8294 IEM_MC_BEGIN(1, 0);
8295 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
8296 IEM_MC_ARG(uint32_t, u32Dst, 0);
8297 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8298 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
8299 IEM_MC_ADVANCE_RIP();
8300 IEM_MC_END();
8301 }
8302 return VINF_SUCCESS;
8303}
8304
8305
8306/**
8307 * Group 15 jump table for register variant.
8308 */
8309IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
8310{ /* pfx: none, 066h, 0f3h, 0f2h */
8311 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
8312 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
8313 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
8314 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
8315 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
8316 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8317 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8318 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8319};
8320AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
8321
8322
8323/**
8324 * Group 15 jump table for memory variant.
8325 */
8326IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
8327{ /* pfx: none, 066h, 0f3h, 0f2h */
8328 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8329 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8330 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8331 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8332 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8333 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8334 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8335 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
8336};
8337AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
8338
8339
8340/** Opcode 0x0f 0xae. */
8341FNIEMOP_DEF(iemOp_Grp15)
8342{
8343 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
8344 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
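/* The tables above are indexed by the ModR/M /reg field times four plus the
   active prefix (none, 066h, 0f3h, 0f2h), matching the four columns of each
   row. */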
8345 if (IEM_IS_MODRM_REG_MODE(bRm))
8346 /* register, register */
8347 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
8348 + pVCpu->iem.s.idxPrefix], bRm);
8349 /* memory, register */
8350 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
8351 + pVCpu->iem.s.idxPrefix], bRm);
8352}
8353
8354
8355/** Opcode 0x0f 0xaf. */
8356FNIEMOP_DEF(iemOp_imul_Gv_Ev)
8357{
8358 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
8359 IEMOP_HLP_MIN_386();
8360 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8361 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags));
8362}
8363
8364
8365/** Opcode 0x0f 0xb0. */
8366FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
8367{
8368 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
8369 IEMOP_HLP_MIN_486();
8370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
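/* CMPXCHG compares AL with the destination operand: if they are equal, ZF is
   set and the source is written to the destination; otherwise ZF is cleared
   and the destination value is loaded into AL. */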
8371
8372 if (IEM_IS_MODRM_REG_MODE(bRm))
8373 {
8374 IEMOP_HLP_DONE_DECODING();
8375 IEM_MC_BEGIN(4, 0);
8376 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8377 IEM_MC_ARG(uint8_t *, pu8Al, 1);
8378 IEM_MC_ARG(uint8_t, u8Src, 2);
8379 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8380
8381 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8382 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8383 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
8384 IEM_MC_REF_EFLAGS(pEFlags);
8385 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8386 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
8387 else
8388 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
8389
8390 IEM_MC_ADVANCE_RIP();
8391 IEM_MC_END();
8392 }
8393 else
8394 {
8395 IEM_MC_BEGIN(4, 3);
8396 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8397 IEM_MC_ARG(uint8_t *, pu8Al, 1);
8398 IEM_MC_ARG(uint8_t, u8Src, 2);
8399 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8401 IEM_MC_LOCAL(uint8_t, u8Al);
8402
8403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8404 IEMOP_HLP_DONE_DECODING();
8405 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8406 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8407 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
8408 IEM_MC_FETCH_EFLAGS(EFlags);
8409 IEM_MC_REF_LOCAL(pu8Al, u8Al);
8410 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8411 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
8412 else
8413 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
8414
8415 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
8416 IEM_MC_COMMIT_EFLAGS(EFlags);
8417 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
8418 IEM_MC_ADVANCE_RIP();
8419 IEM_MC_END();
8420 }
8421 return VINF_SUCCESS;
8422}
8423
8424/** Opcode 0x0f 0xb1. */
8425FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
8426{
8427 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
8428 IEMOP_HLP_MIN_486();
8429 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8430
8431 if (IEM_IS_MODRM_REG_MODE(bRm))
8432 {
8433 IEMOP_HLP_DONE_DECODING();
8434 switch (pVCpu->iem.s.enmEffOpSize)
8435 {
8436 case IEMMODE_16BIT:
8437 IEM_MC_BEGIN(4, 0);
8438 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8439 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
8440 IEM_MC_ARG(uint16_t, u16Src, 2);
8441 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8442
8443 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8444 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8445 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
8446 IEM_MC_REF_EFLAGS(pEFlags);
8447 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8448 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
8449 else
8450 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
8451
8452 IEM_MC_ADVANCE_RIP();
8453 IEM_MC_END();
8454 return VINF_SUCCESS;
8455
8456 case IEMMODE_32BIT:
8457 IEM_MC_BEGIN(4, 0);
8458 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8459 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
8460 IEM_MC_ARG(uint32_t, u32Src, 2);
8461 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8462
8463 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8464 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8465 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
8466 IEM_MC_REF_EFLAGS(pEFlags);
8467 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8468 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
8469 else
8470 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
8471
8472 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
8473 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8474 IEM_MC_ADVANCE_RIP();
8475 IEM_MC_END();
8476 return VINF_SUCCESS;
8477
8478 case IEMMODE_64BIT:
8479 IEM_MC_BEGIN(4, 0);
8480 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8481 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
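/* On 32-bit x86 hosts the 64-bit source operand is handed to the assembly
   worker by reference rather than by value, presumably to avoid passing a
   64-bit argument in the 32-bit calling convention. */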
8482#ifdef RT_ARCH_X86
8483 IEM_MC_ARG(uint64_t *, pu64Src, 2);
8484#else
8485 IEM_MC_ARG(uint64_t, u64Src, 2);
8486#endif
8487 IEM_MC_ARG(uint32_t *, pEFlags, 3);
8488
8489 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8490 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
8491 IEM_MC_REF_EFLAGS(pEFlags);
8492#ifdef RT_ARCH_X86
8493 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8494 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8495 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
8496 else
8497 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
8498#else
8499 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8500 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8501 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
8502 else
8503 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
8504#endif
8505
8506 IEM_MC_ADVANCE_RIP();
8507 IEM_MC_END();
8508 return VINF_SUCCESS;
8509
8510 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8511 }
8512 }
8513 else
8514 {
8515 switch (pVCpu->iem.s.enmEffOpSize)
8516 {
8517 case IEMMODE_16BIT:
8518 IEM_MC_BEGIN(4, 3);
8519 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8520 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
8521 IEM_MC_ARG(uint16_t, u16Src, 2);
8522 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8524 IEM_MC_LOCAL(uint16_t, u16Ax);
8525
8526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8527 IEMOP_HLP_DONE_DECODING();
8528 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8529 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8530 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
8531 IEM_MC_FETCH_EFLAGS(EFlags);
8532 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
8533 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8534 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
8535 else
8536 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
8537
8538 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8539 IEM_MC_COMMIT_EFLAGS(EFlags);
8540 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
8541 IEM_MC_ADVANCE_RIP();
8542 IEM_MC_END();
8543 return VINF_SUCCESS;
8544
8545 case IEMMODE_32BIT:
8546 IEM_MC_BEGIN(4, 3);
8547 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8548 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
8549 IEM_MC_ARG(uint32_t, u32Src, 2);
8550 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8552 IEM_MC_LOCAL(uint32_t, u32Eax);
8553
8554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8555 IEMOP_HLP_DONE_DECODING();
8556 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8557 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8558 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
8559 IEM_MC_FETCH_EFLAGS(EFlags);
8560 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
8561 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8562 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
8563 else
8564 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
8565
8566 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
8567 IEM_MC_COMMIT_EFLAGS(EFlags);
8568 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
8569 IEM_MC_ADVANCE_RIP();
8570 IEM_MC_END();
8571 return VINF_SUCCESS;
8572
8573 case IEMMODE_64BIT:
8574 IEM_MC_BEGIN(4, 3);
8575 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8576 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
8577#ifdef RT_ARCH_X86
8578 IEM_MC_ARG(uint64_t *, pu64Src, 2);
8579#else
8580 IEM_MC_ARG(uint64_t, u64Src, 2);
8581#endif
8582 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
8583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8584 IEM_MC_LOCAL(uint64_t, u64Rax);
8585
8586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8587 IEMOP_HLP_DONE_DECODING();
8588 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8589 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
8590 IEM_MC_FETCH_EFLAGS(EFlags);
8591 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
8592#ifdef RT_ARCH_X86
8593 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8594 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8595 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
8596 else
8597 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
8598#else
8599 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
8600 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
8601 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
8602 else
8603 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
8604#endif
8605
8606 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
8607 IEM_MC_COMMIT_EFLAGS(EFlags);
8608 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
8609 IEM_MC_ADVANCE_RIP();
8610 IEM_MC_END();
8611 return VINF_SUCCESS;
8612
8613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8614 }
8615 }
8616}
8617
8618
8619FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
8620{
8621 Assert(IEM_IS_MODRM_MEM_MODE(bRm)); /* Caller checks this */
8622 uint8_t const iGReg = IEM_GET_MODRM_REG(pVCpu, bRm);
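/* A far pointer in memory is laid out with the offset first and the 16-bit
   selector immediately after it, so the selector is fetched at displacement
   2, 4 or 8 according to the operand size. */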
8623
8624 switch (pVCpu->iem.s.enmEffOpSize)
8625 {
8626 case IEMMODE_16BIT:
8627 IEM_MC_BEGIN(5, 1);
8628 IEM_MC_ARG(uint16_t, uSel, 0);
8629 IEM_MC_ARG(uint16_t, offSeg, 1);
8630 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8631 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8632 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8633 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8634 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8636 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8637 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
8638 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8639 IEM_MC_END();
8640 return VINF_SUCCESS;
8641
8642 case IEMMODE_32BIT:
8643 IEM_MC_BEGIN(5, 1);
8644 IEM_MC_ARG(uint16_t, uSel, 0);
8645 IEM_MC_ARG(uint32_t, offSeg, 1);
8646 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8647 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8648 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8649 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8652 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8653 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
8654 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8655 IEM_MC_END();
8656 return VINF_SUCCESS;
8657
8658 case IEMMODE_64BIT:
8659 IEM_MC_BEGIN(5, 1);
8660 IEM_MC_ARG(uint16_t, uSel, 0);
8661 IEM_MC_ARG(uint64_t, offSeg, 1);
8662 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
8663 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
8664 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
8665 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
8666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
8667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8668 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
8669 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8670 else
8671 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
8672 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
8673 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
8674 IEM_MC_END();
8675 return VINF_SUCCESS;
8676
8677 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8678 }
8679}
8680
8681
8682/** Opcode 0x0f 0xb2. */
8683FNIEMOP_DEF(iemOp_lss_Gv_Mp)
8684{
8685 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
8686 IEMOP_HLP_MIN_386();
8687 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8688 if (IEM_IS_MODRM_REG_MODE(bRm))
8689 return IEMOP_RAISE_INVALID_OPCODE();
8690 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
8691}
8692
8693
8694/** Opcode 0x0f 0xb3. */
8695FNIEMOP_DEF(iemOp_btr_Ev_Gv)
8696{
8697 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
8698 IEMOP_HLP_MIN_386();
8699 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
8700}
8701
8702
8703/** Opcode 0x0f 0xb4. */
8704FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
8705{
8706 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
8707 IEMOP_HLP_MIN_386();
8708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8709 if (IEM_IS_MODRM_REG_MODE(bRm))
8710 return IEMOP_RAISE_INVALID_OPCODE();
8711 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
8712}
8713
8714
8715/** Opcode 0x0f 0xb5. */
8716FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
8717{
8718 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
8719 IEMOP_HLP_MIN_386();
8720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8721 if (IEM_IS_MODRM_REG_MODE(bRm))
8722 return IEMOP_RAISE_INVALID_OPCODE();
8723 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
8724}
8725
8726
8727/** Opcode 0x0f 0xb6. */
8728FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
8729{
8730 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
8731 IEMOP_HLP_MIN_386();
8732
8733 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8734
8735 /*
8736 * If rm denotes a register, there are no more instruction bytes.
8737 */
8738 if (IEM_IS_MODRM_REG_MODE(bRm))
8739 {
8740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8741 switch (pVCpu->iem.s.enmEffOpSize)
8742 {
8743 case IEMMODE_16BIT:
8744 IEM_MC_BEGIN(0, 1);
8745 IEM_MC_LOCAL(uint16_t, u16Value);
8746 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8747 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8748 IEM_MC_ADVANCE_RIP();
8749 IEM_MC_END();
8750 return VINF_SUCCESS;
8751
8752 case IEMMODE_32BIT:
8753 IEM_MC_BEGIN(0, 1);
8754 IEM_MC_LOCAL(uint32_t, u32Value);
8755 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8756 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8757 IEM_MC_ADVANCE_RIP();
8758 IEM_MC_END();
8759 return VINF_SUCCESS;
8760
8761 case IEMMODE_64BIT:
8762 IEM_MC_BEGIN(0, 1);
8763 IEM_MC_LOCAL(uint64_t, u64Value);
8764 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8765 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8766 IEM_MC_ADVANCE_RIP();
8767 IEM_MC_END();
8768 return VINF_SUCCESS;
8769
8770 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8771 }
8772 }
8773 else
8774 {
8775 /*
8776 * We're loading a register from memory.
8777 */
8778 switch (pVCpu->iem.s.enmEffOpSize)
8779 {
8780 case IEMMODE_16BIT:
8781 IEM_MC_BEGIN(0, 2);
8782 IEM_MC_LOCAL(uint16_t, u16Value);
8783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8786 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8787 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
8788 IEM_MC_ADVANCE_RIP();
8789 IEM_MC_END();
8790 return VINF_SUCCESS;
8791
8792 case IEMMODE_32BIT:
8793 IEM_MC_BEGIN(0, 2);
8794 IEM_MC_LOCAL(uint32_t, u32Value);
8795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8798 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8799 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8800 IEM_MC_ADVANCE_RIP();
8801 IEM_MC_END();
8802 return VINF_SUCCESS;
8803
8804 case IEMMODE_64BIT:
8805 IEM_MC_BEGIN(0, 2);
8806 IEM_MC_LOCAL(uint64_t, u64Value);
8807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8810 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8811 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8812 IEM_MC_ADVANCE_RIP();
8813 IEM_MC_END();
8814 return VINF_SUCCESS;
8815
8816 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8817 }
8818 }
8819}
8820
8821
8822/** Opcode 0x0f 0xb7. */
8823FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
8824{
8825 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
8826 IEMOP_HLP_MIN_386();
8827
8828 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8829
8830 /** @todo Not entirely sure how the operand size prefix is handled here,
8831 * assuming that it will be ignored. Would be nice to have a few
8832 * tests for this. */
8833 /*
8834 * If rm denotes a register, there are no more instruction bytes.
8835 */
8836 if (IEM_IS_MODRM_REG_MODE(bRm))
8837 {
8838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8839 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8840 {
8841 IEM_MC_BEGIN(0, 1);
8842 IEM_MC_LOCAL(uint32_t, u32Value);
8843 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8844 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8845 IEM_MC_ADVANCE_RIP();
8846 IEM_MC_END();
8847 }
8848 else
8849 {
8850 IEM_MC_BEGIN(0, 1);
8851 IEM_MC_LOCAL(uint64_t, u64Value);
8852 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
8853 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8854 IEM_MC_ADVANCE_RIP();
8855 IEM_MC_END();
8856 }
8857 }
8858 else
8859 {
8860 /*
8861 * We're loading a register from memory.
8862 */
8863 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
8864 {
8865 IEM_MC_BEGIN(0, 2);
8866 IEM_MC_LOCAL(uint32_t, u32Value);
8867 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8870 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8871 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
8872 IEM_MC_ADVANCE_RIP();
8873 IEM_MC_END();
8874 }
8875 else
8876 {
8877 IEM_MC_BEGIN(0, 2);
8878 IEM_MC_LOCAL(uint64_t, u64Value);
8879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8882 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8883 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
8884 IEM_MC_ADVANCE_RIP();
8885 IEM_MC_END();
8886 }
8887 }
8888 return VINF_SUCCESS;
8889}
8890
8891
8892/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
8893FNIEMOP_UD_STUB(iemOp_jmpe);
8894
8895
8896/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
8897FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
8898{
8899 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
8900 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
8901 return iemOp_InvalidNeedRM(pVCpu);
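/* Pick the native POPCNT worker when the host CPU supports it and the C
   fallback otherwise; both tables are compiled out entirely for the
   TST_IEM_CHECK_MC build. */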
8902#ifndef TST_IEM_CHECK_MC
8903# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
8904 static const IEMOPBINSIZES s_Native =
8905 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
8906# endif
8907 static const IEMOPBINSIZES s_Fallback =
8908 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
8909#endif
8910 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback));
8911}
8912
8913
8914/**
8915 * @opcode 0xb9
8916 * @opinvalid intel-modrm
8917 * @optest ->
8918 */
8919FNIEMOP_DEF(iemOp_Grp10)
8920{
8921 /*
8922 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
8923 * ModR/M byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
8924 */
8925 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
8926 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
8927 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
8928}
8929
8930
8931/** Opcode 0x0f 0xba. */
8932FNIEMOP_DEF(iemOp_Grp8)
8933{
8934 IEMOP_HLP_MIN_386();
8935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8936 PCIEMOPBINSIZES pImpl;
8937 switch (IEM_GET_MODRM_REG_8(bRm))
8938 {
8939 case 0: case 1: case 2: case 3:
8940 /* Both AMD and Intel want full modr/m decoding and imm8. */
8941 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
8942 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
8943 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
8944 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
8945 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
8946 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8947 }
8948 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8949
8950 if (IEM_IS_MODRM_REG_MODE(bRm))
8951 {
8952 /* register destination. */
8953 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
8954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8955
8956 switch (pVCpu->iem.s.enmEffOpSize)
8957 {
8958 case IEMMODE_16BIT:
8959 IEM_MC_BEGIN(3, 0);
8960 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8961 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
8962 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8963
8964 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8965 IEM_MC_REF_EFLAGS(pEFlags);
8966 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
8967
8968 IEM_MC_ADVANCE_RIP();
8969 IEM_MC_END();
8970 return VINF_SUCCESS;
8971
8972 case IEMMODE_32BIT:
8973 IEM_MC_BEGIN(3, 0);
8974 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8975 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
8976 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8977
8978 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8979 IEM_MC_REF_EFLAGS(pEFlags);
8980 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
8981
8982 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8983 IEM_MC_ADVANCE_RIP();
8984 IEM_MC_END();
8985 return VINF_SUCCESS;
8986
8987 case IEMMODE_64BIT:
8988 IEM_MC_BEGIN(3, 0);
8989 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8990 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
8991 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8992
8993 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8994 IEM_MC_REF_EFLAGS(pEFlags);
8995 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
8996
8997 IEM_MC_ADVANCE_RIP();
8998 IEM_MC_END();
8999 return VINF_SUCCESS;
9000
9001 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9002 }
9003 }
9004 else
9005 {
9006 /* memory destination. */
9007
9008 uint32_t fAccess;
9009 if (pImpl->pfnLockedU16)
9010 fAccess = IEM_ACCESS_DATA_RW;
9011 else /* BT */
9012 fAccess = IEM_ACCESS_DATA_R;
9013
9014 /** @todo test negative bit offsets! */
9015 switch (pVCpu->iem.s.enmEffOpSize)
9016 {
9017 case IEMMODE_16BIT:
9018 IEM_MC_BEGIN(3, 1);
9019 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9020 IEM_MC_ARG(uint16_t, u16Src, 1);
9021 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9022 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9023
9024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9025 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
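/* The immediate bit offset simply wraps within the operand (modulo 16 here,
   32/64 below), so unlike the Gv forms no effective address adjustment is
   needed. */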
9026 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
9027 if (pImpl->pfnLockedU16)
9028 IEMOP_HLP_DONE_DECODING();
9029 else
9030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9031 IEM_MC_FETCH_EFLAGS(EFlags);
9032 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9033 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9034 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9035 else
9036 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9037 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9038
9039 IEM_MC_COMMIT_EFLAGS(EFlags);
9040 IEM_MC_ADVANCE_RIP();
9041 IEM_MC_END();
9042 return VINF_SUCCESS;
9043
9044 case IEMMODE_32BIT:
9045 IEM_MC_BEGIN(3, 1);
9046 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9047 IEM_MC_ARG(uint32_t, u32Src, 1);
9048 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9050
9051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9052 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
9053 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
9054 if (pImpl->pfnLockedU16)
9055 IEMOP_HLP_DONE_DECODING();
9056 else
9057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9058 IEM_MC_FETCH_EFLAGS(EFlags);
9059 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9060 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9061 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9062 else
9063 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
9064 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
9065
9066 IEM_MC_COMMIT_EFLAGS(EFlags);
9067 IEM_MC_ADVANCE_RIP();
9068 IEM_MC_END();
9069 return VINF_SUCCESS;
9070
9071 case IEMMODE_64BIT:
9072 IEM_MC_BEGIN(3, 1);
9073 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9074 IEM_MC_ARG(uint64_t, u64Src, 1);
9075 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9077
9078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9079 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
9080 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
9081 if (pImpl->pfnLockedU16)
9082 IEMOP_HLP_DONE_DECODING();
9083 else
9084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9085 IEM_MC_FETCH_EFLAGS(EFlags);
9086 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
9087 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9088 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9089 else
9090 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
9091 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
9092
9093 IEM_MC_COMMIT_EFLAGS(EFlags);
9094 IEM_MC_ADVANCE_RIP();
9095 IEM_MC_END();
9096 return VINF_SUCCESS;
9097
9098 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9099 }
9100 }
9101}
9102
9103
9104/** Opcode 0x0f 0xbb. */
9105FNIEMOP_DEF(iemOp_btc_Ev_Gv)
9106{
9107 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
9108 IEMOP_HLP_MIN_386();
9109 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
9110}
9111
9112
9113/**
9114 * Common worker for BSF and BSR instructions.
9115 *
9116 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
9117 * the destination register, which means that for 32-bit operations the high
9118 * bits must be left alone.
9119 *
9120 * @param pImpl Pointer to the instruction implementation (assembly).
9121 */
9122FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
9123{
9124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9125
9126 /*
9127 * If rm denotes a register, there are no more instruction bytes.
9128 */
9129 if (IEM_IS_MODRM_REG_MODE(bRm))
9130 {
9131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9132 switch (pVCpu->iem.s.enmEffOpSize)
9133 {
9134 case IEMMODE_16BIT:
9135 IEM_MC_BEGIN(3, 0);
9136 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9137 IEM_MC_ARG(uint16_t, u16Src, 1);
9138 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9139
9140 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
9141 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9142 IEM_MC_REF_EFLAGS(pEFlags);
9143 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9144
9145 IEM_MC_ADVANCE_RIP();
9146 IEM_MC_END();
9147 break;
9148
9149 case IEMMODE_32BIT:
9150 IEM_MC_BEGIN(3, 0);
9151 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9152 IEM_MC_ARG(uint32_t, u32Src, 1);
9153 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9154
9155 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
9156 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9157 IEM_MC_REF_EFLAGS(pEFlags);
9158 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9159 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9160 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9161 IEM_MC_ENDIF();
9162 IEM_MC_ADVANCE_RIP();
9163 IEM_MC_END();
9164 break;
9165
9166 case IEMMODE_64BIT:
9167 IEM_MC_BEGIN(3, 0);
9168 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9169 IEM_MC_ARG(uint64_t, u64Src, 1);
9170 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9171
9172 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
9173 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9174 IEM_MC_REF_EFLAGS(pEFlags);
9175 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9176
9177 IEM_MC_ADVANCE_RIP();
9178 IEM_MC_END();
9179 break;
9180 }
9181 }
9182 else
9183 {
9184 /*
9185 * We're accessing memory.
9186 */
9187 switch (pVCpu->iem.s.enmEffOpSize)
9188 {
9189 case IEMMODE_16BIT:
9190 IEM_MC_BEGIN(3, 1);
9191 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9192 IEM_MC_ARG(uint16_t, u16Src, 1);
9193 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9194 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9195
9196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9198 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9199 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9200 IEM_MC_REF_EFLAGS(pEFlags);
9201 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9202
9203 IEM_MC_ADVANCE_RIP();
9204 IEM_MC_END();
9205 break;
9206
9207 case IEMMODE_32BIT:
9208 IEM_MC_BEGIN(3, 1);
9209 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9210 IEM_MC_ARG(uint32_t, u32Src, 1);
9211 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9212 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9213
9214 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9216 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9217 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9218 IEM_MC_REF_EFLAGS(pEFlags);
9219 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
9220
9221 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9222 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9223 IEM_MC_ENDIF();
9224 IEM_MC_ADVANCE_RIP();
9225 IEM_MC_END();
9226 break;
9227
9228 case IEMMODE_64BIT:
9229 IEM_MC_BEGIN(3, 1);
9230 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9231 IEM_MC_ARG(uint64_t, u64Src, 1);
9232 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9233 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9234
9235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9237 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9238 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
9239 IEM_MC_REF_EFLAGS(pEFlags);
9240 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
9241
9242 IEM_MC_ADVANCE_RIP();
9243 IEM_MC_END();
9244 break;
9245 }
9246 }
9247 return VINF_SUCCESS;
9248}
9249
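/*
 * A minimal model (not compiled; names illustrative) of the bit-scan
 * semantics implemented above: on a zero source ZF is set and the
 * destination is left untouched, which is why the 32-bit paths above only
 * clear the high dword of the destination when ZF ends up clear.
 */
#if 0
#include <stdint.h>

/* Returns the resulting ZF; bsr would scan from bit 31 downwards instead. */
static int bsfU32(uint32_t uSrc, uint64_t *puDst /* full 64-bit GREG */)
{
    if (!uSrc)
        return 1;                   /* ZF=1: *puDst is not written at all. */
    unsigned iBit = 0;
    while (!(uSrc & 1))
    {
        uSrc >>= 1;
        iBit++;
    }
    *puDst = iBit;                  /* 32-bit result, zero extended. */
    return 0;                       /* ZF=0 */
}
#endif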
9250
9251/** Opcode 0x0f 0xbc. */
9252FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
9253{
9254 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
9255 IEMOP_HLP_MIN_386();
9256 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
9257 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
9258}
9259
9260
9261/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
9262FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
9263{
9264 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
9265 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
9266 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
9267
9268#ifndef TST_IEM_CHECK_MC
9269 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
9270 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
9271 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
9272 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
9273 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
9274 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
9275 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
9276 {
9277 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
9278 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
9279 };
9280#endif
9281 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
9282 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
9283 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
9284}
9285
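/*
 * How tzcnt differs from the bsf it falls back to without BMI1, as a small
 * sketch (illustrative only): the result is defined for a zero source (the
 * operand width), CF reports the zero-source case and ZF reports a zero
 * result, whereas bsf leaves the destination alone and only defines ZF.
 */
#if 0
#include <stdint.h>

static unsigned tzcntU32(uint32_t uSrc, int *pfCF, int *pfZF)
{
    unsigned cBits = 0;
    if (!uSrc)
        cBits = 32;                     /* Defined result, unlike bsf. */
    else
        while (!(uSrc & 1))
        {
            uSrc >>= 1;
            cBits++;
        }
    *pfCF = cBits == 32;                /* Source was all zero. */
    *pfZF = cBits == 0;                 /* Bit 0 of the source was set. */
    return cBits;
}
#endif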
9286
9287/** Opcode 0x0f 0xbd. */
9288FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
9289{
9290 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
9291 IEMOP_HLP_MIN_386();
9292 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
9293 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
9294}
9295
9296
9297/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
9298FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
9299{
9300 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
9301 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
9302 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
9303
9304#ifndef TST_IEM_CHECK_MC
9305 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
9306 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
9307 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
9308 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
9309 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
9310 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
9311 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
9312 {
9313 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
9314 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
9315 };
9316#endif
9317 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
9318 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm,
9319 IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags, IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1));
9320}
9321
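/*
 * The lzcnt counterpart of the tzcnt sketch above (illustrative only):
 * counts from the most significant bit instead, with the same CF/ZF
 * conventions.
 */
#if 0
#include <stdint.h>

static unsigned lzcntU32(uint32_t uSrc, int *pfCF, int *pfZF)
{
    unsigned cBits = 0;
    if (!uSrc)
        cBits = 32;                     /* Defined result, unlike bsr. */
    else
        while (!(uSrc & UINT32_C(0x80000000)))
        {
            uSrc <<= 1;
            cBits++;
        }
    *pfCF = cBits == 32;
    *pfZF = cBits == 0;
    return cBits;
}
#endif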
9322
9323
9324/** Opcode 0x0f 0xbe. */
9325FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
9326{
9327 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
9328 IEMOP_HLP_MIN_386();
9329
9330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9331
9332 /*
9333 * If rm is denoting a register, no more instruction bytes.
9334 */
9335 if (IEM_IS_MODRM_REG_MODE(bRm))
9336 {
9337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9338 switch (pVCpu->iem.s.enmEffOpSize)
9339 {
9340 case IEMMODE_16BIT:
9341 IEM_MC_BEGIN(0, 1);
9342 IEM_MC_LOCAL(uint16_t, u16Value);
9343 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9344 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
9345 IEM_MC_ADVANCE_RIP();
9346 IEM_MC_END();
9347 return VINF_SUCCESS;
9348
9349 case IEMMODE_32BIT:
9350 IEM_MC_BEGIN(0, 1);
9351 IEM_MC_LOCAL(uint32_t, u32Value);
9352 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9353 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9354 IEM_MC_ADVANCE_RIP();
9355 IEM_MC_END();
9356 return VINF_SUCCESS;
9357
9358 case IEMMODE_64BIT:
9359 IEM_MC_BEGIN(0, 1);
9360 IEM_MC_LOCAL(uint64_t, u64Value);
9361 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9362 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9363 IEM_MC_ADVANCE_RIP();
9364 IEM_MC_END();
9365 return VINF_SUCCESS;
9366
9367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9368 }
9369 }
9370 else
9371 {
9372 /*
9373 * We're loading a register from memory.
9374 */
9375 switch (pVCpu->iem.s.enmEffOpSize)
9376 {
9377 case IEMMODE_16BIT:
9378 IEM_MC_BEGIN(0, 2);
9379 IEM_MC_LOCAL(uint16_t, u16Value);
9380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9383 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9384 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
9385 IEM_MC_ADVANCE_RIP();
9386 IEM_MC_END();
9387 return VINF_SUCCESS;
9388
9389 case IEMMODE_32BIT:
9390 IEM_MC_BEGIN(0, 2);
9391 IEM_MC_LOCAL(uint32_t, u32Value);
9392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9395 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9396 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9397 IEM_MC_ADVANCE_RIP();
9398 IEM_MC_END();
9399 return VINF_SUCCESS;
9400
9401 case IEMMODE_64BIT:
9402 IEM_MC_BEGIN(0, 2);
9403 IEM_MC_LOCAL(uint64_t, u64Value);
9404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9407 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9408 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9409 IEM_MC_ADVANCE_RIP();
9410 IEM_MC_END();
9411 return VINF_SUCCESS;
9412
9413 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9414 }
9415 }
9416}
9417
9418
9419/** Opcode 0x0f 0xbf. */
9420FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
9421{
9422 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
9423 IEMOP_HLP_MIN_386();
9424
9425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9426
9427 /** @todo Not entirely sure how the operand size prefix is handled here,
9428 * assuming that it will be ignored. Would be nice to have a few
9429 *        tests for this. */
9430 /*
9431 * If rm is denoting a register, no more instruction bytes.
9432 */
9433 if (IEM_IS_MODRM_REG_MODE(bRm))
9434 {
9435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9436 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
9437 {
9438 IEM_MC_BEGIN(0, 1);
9439 IEM_MC_LOCAL(uint32_t, u32Value);
9440 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9441 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9442 IEM_MC_ADVANCE_RIP();
9443 IEM_MC_END();
9444 }
9445 else
9446 {
9447 IEM_MC_BEGIN(0, 1);
9448 IEM_MC_LOCAL(uint64_t, u64Value);
9449 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
9450 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9451 IEM_MC_ADVANCE_RIP();
9452 IEM_MC_END();
9453 }
9454 }
9455 else
9456 {
9457 /*
9458 * We're loading a register from memory.
9459 */
9460 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
9461 {
9462 IEM_MC_BEGIN(0, 2);
9463 IEM_MC_LOCAL(uint32_t, u32Value);
9464 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9465 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9467 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9468 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
9469 IEM_MC_ADVANCE_RIP();
9470 IEM_MC_END();
9471 }
9472 else
9473 {
9474 IEM_MC_BEGIN(0, 2);
9475 IEM_MC_LOCAL(uint64_t, u64Value);
9476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9479 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9480 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
9481 IEM_MC_ADVANCE_RIP();
9482 IEM_MC_END();
9483 }
9484 }
9485 return VINF_SUCCESS;
9486}
9487
9488
9489/** Opcode 0x0f 0xc0. */
9490FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
9491{
9492 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9493 IEMOP_HLP_MIN_486();
9494 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
9495
9496 /*
9497 * If rm is denoting a register, no more instruction bytes.
9498 */
9499 if (IEM_IS_MODRM_REG_MODE(bRm))
9500 {
9501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9502
9503 IEM_MC_BEGIN(3, 0);
9504 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9505 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
9506 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9507
9508 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9509 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9510 IEM_MC_REF_EFLAGS(pEFlags);
9511 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
9512
9513 IEM_MC_ADVANCE_RIP();
9514 IEM_MC_END();
9515 }
9516 else
9517 {
9518 /*
9519 * We're accessing memory.
9520 */
9521 IEM_MC_BEGIN(3, 3);
9522 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9523 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
9524 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9525 IEM_MC_LOCAL(uint8_t, u8RegCopy);
9526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9527
9528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9529 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9530 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9531 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
9532 IEM_MC_FETCH_EFLAGS(EFlags);
9533 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9534 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
9535 else
9536 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
9537
9538 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9539 IEM_MC_COMMIT_EFLAGS(EFlags);
9540 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
9541 IEM_MC_ADVANCE_RIP();
9542 IEM_MC_END();
9544 }
9545 return VINF_SUCCESS;
9546}
9547
9548
9549/** Opcode 0x0f 0xc1. */
9550FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
9551{
9552 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
9553 IEMOP_HLP_MIN_486();
9554 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9555
9556 /*
9557 * If rm is denoting a register, no more instruction bytes.
9558 */
9559 if (IEM_IS_MODRM_REG_MODE(bRm))
9560 {
9561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9562
9563 switch (pVCpu->iem.s.enmEffOpSize)
9564 {
9565 case IEMMODE_16BIT:
9566 IEM_MC_BEGIN(3, 0);
9567 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9568 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
9569 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9570
9571 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9572 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9573 IEM_MC_REF_EFLAGS(pEFlags);
9574 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
9575
9576 IEM_MC_ADVANCE_RIP();
9577 IEM_MC_END();
9578 return VINF_SUCCESS;
9579
9580 case IEMMODE_32BIT:
9581 IEM_MC_BEGIN(3, 0);
9582 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9583 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
9584 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9585
9586 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9587 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9588 IEM_MC_REF_EFLAGS(pEFlags);
9589 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
9590
9591 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9592 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
9593 IEM_MC_ADVANCE_RIP();
9594 IEM_MC_END();
9595 return VINF_SUCCESS;
9596
9597 case IEMMODE_64BIT:
9598 IEM_MC_BEGIN(3, 0);
9599 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9600 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
9601 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9602
9603 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9604 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
9605 IEM_MC_REF_EFLAGS(pEFlags);
9606 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
9607
9608 IEM_MC_ADVANCE_RIP();
9609 IEM_MC_END();
9610 return VINF_SUCCESS;
9611
9612 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9613 }
9614 }
9615 else
9616 {
9617 /*
9618 * We're accessing memory.
9619 */
9620 switch (pVCpu->iem.s.enmEffOpSize)
9621 {
9622 case IEMMODE_16BIT:
9623 IEM_MC_BEGIN(3, 3);
9624 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9625 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
9626 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9627 IEM_MC_LOCAL(uint16_t, u16RegCopy);
9628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9629
9630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9631 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9632 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9633 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
9634 IEM_MC_FETCH_EFLAGS(EFlags);
9635 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9636 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
9637 else
9638 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
9639
9640 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9641 IEM_MC_COMMIT_EFLAGS(EFlags);
9642 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
9643 IEM_MC_ADVANCE_RIP();
9644 IEM_MC_END();
9645 return VINF_SUCCESS;
9646
9647 case IEMMODE_32BIT:
9648 IEM_MC_BEGIN(3, 3);
9649 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9650 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
9651 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9652 IEM_MC_LOCAL(uint32_t, u32RegCopy);
9653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9654
9655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9656 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9657 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9658 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
9659 IEM_MC_FETCH_EFLAGS(EFlags);
9660 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9661 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
9662 else
9663 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
9664
9665 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9666 IEM_MC_COMMIT_EFLAGS(EFlags);
9667 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
9668 IEM_MC_ADVANCE_RIP();
9669 IEM_MC_END();
9670 return VINF_SUCCESS;
9671
9672 case IEMMODE_64BIT:
9673 IEM_MC_BEGIN(3, 3);
9674 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9675 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
9676 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9677 IEM_MC_LOCAL(uint64_t, u64RegCopy);
9678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9679
9680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9681 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9682 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
9683 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
9684 IEM_MC_FETCH_EFLAGS(EFlags);
9685 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9686 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
9687 else
9688 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
9689
9690 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9691 IEM_MC_COMMIT_EFLAGS(EFlags);
9692 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
9693 IEM_MC_ADVANCE_RIP();
9694 IEM_MC_END();
9695 return VINF_SUCCESS;
9696
9697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9698 }
9699 }
9700}
9701
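/*
 * A compact model (illustrative only) of the xadd semantics emulated above:
 * the destination receives the sum and the register operand receives the
 * old destination value. In the memory paths the uXxRegCopy local goes into
 * the call holding the addend, comes back holding the old destination, and
 * is then written back to the guest register.
 */
#if 0
#include <stdint.h>

static void xaddU32(uint32_t *puDst, uint32_t *puReg)
{
    uint32_t const uOldDst = *puDst;
    *puDst = uOldDst + *puReg;          /* EFLAGS are set as for ADD (omitted here). */
    *puReg = uOldDst;                   /* The register ends up with the old destination. */
}
#endif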
9702
9703/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
9704FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
9705/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
9706FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
9707/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
9708FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
9709/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
9710FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
9711
9712
9713/** Opcode 0x0f 0xc3. */
9714FNIEMOP_DEF(iemOp_movnti_My_Gy)
9715{
9716 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
9717
9718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9719
9720 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
9721 if (IEM_IS_MODRM_MEM_MODE(bRm))
9722 {
9723 switch (pVCpu->iem.s.enmEffOpSize)
9724 {
9725 case IEMMODE_32BIT:
9726 IEM_MC_BEGIN(0, 2);
9727 IEM_MC_LOCAL(uint32_t, u32Value);
9728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9729
9730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9732 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9733 return IEMOP_RAISE_INVALID_OPCODE();
9734
9735 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
9736 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
9737 IEM_MC_ADVANCE_RIP();
9738 IEM_MC_END();
9739 break;
9740
9741 case IEMMODE_64BIT:
9742 IEM_MC_BEGIN(0, 2);
9743 IEM_MC_LOCAL(uint64_t, u64Value);
9744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9745
9746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9748 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
9749 return IEMOP_RAISE_INVALID_OPCODE();
9750
9751 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
9752 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
9753 IEM_MC_ADVANCE_RIP();
9754 IEM_MC_END();
9755 break;
9756
9757 case IEMMODE_16BIT:
9758 /** @todo check this form. */
9759 return IEMOP_RAISE_INVALID_OPCODE();
9760 }
9761 }
9762 else
9763 return IEMOP_RAISE_INVALID_OPCODE();
9764 return VINF_SUCCESS;
9765}
9766/* Opcode 0x66 0x0f 0xc3 - invalid */
9767/* Opcode 0xf3 0x0f 0xc3 - invalid */
9768/* Opcode 0xf2 0x0f 0xc3 - invalid */
9769
9770/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
9771FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
9772/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
9773FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
9774/* Opcode 0xf3 0x0f 0xc4 - invalid */
9775/* Opcode 0xf2 0x0f 0xc4 - invalid */
9776
9777/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
9778FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
9779/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
9780FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
9781/* Opcode 0xf3 0x0f 0xc5 - invalid */
9782/* Opcode 0xf2 0x0f 0xc5 - invalid */
9783
9784/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
9785FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
9786/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
9787FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
9788/* Opcode 0xf3 0x0f 0xc6 - invalid */
9789/* Opcode 0xf2 0x0f 0xc6 - invalid */
9790
9791
9792/** Opcode 0x0f 0xc7 !11/1. */
9793FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
9794{
9795 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
9796
9797 IEM_MC_BEGIN(4, 3);
9798 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
9799 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
9800 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
9801 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
9802 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
9803 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
9804 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9805
9806 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9807 IEMOP_HLP_DONE_DECODING();
9808 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9809
9810 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
9811 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
9812 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
9813
9814 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
9815 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
9816 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
9817
9818 IEM_MC_FETCH_EFLAGS(EFlags);
9819 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9820 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
9821 else
9822 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
9823
9824 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
9825 IEM_MC_COMMIT_EFLAGS(EFlags);
9826 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9827 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
9828 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
9829 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
9830 IEM_MC_ENDIF();
9831 IEM_MC_ADVANCE_RIP();
9832
9833 IEM_MC_END();
9834 return VINF_SUCCESS;
9835}
9836
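/*
 * The cmpxchg8b semantics implemented above, as a standalone sketch
 * (illustrative only): compare EDX:EAX with the memory operand; on a match
 * ZF is set and ECX:EBX is stored, otherwise ZF is cleared and the memory
 * value is loaded into EDX:EAX - hence the conditional register write-back
 * under IEM_MC_IF_EFL_BIT_NOT_SET above.
 */
#if 0
#include <stdint.h>

/* Returns the resulting ZF. */
static int cmpxchg8bSketch(uint64_t *pu64Mem, uint64_t *pu64EaxEdx, uint64_t u64EbxEcx)
{
    if (*pu64Mem == *pu64EaxEdx)
    {
        *pu64Mem = u64EbxEcx;           /* ZF=1: store ECX:EBX. */
        return 1;
    }
    *pu64EaxEdx = *pu64Mem;             /* ZF=0: load into EDX:EAX. */
    return 0;
}
#endif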
9837
9838/** Opcode REX.W 0x0f 0xc7 !11/1. */
9839FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
9840{
9841 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
9842 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
9843 {
9844#if 0
9845 RT_NOREF(bRm);
9846 IEMOP_BITCH_ABOUT_STUB();
9847 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
9848#else
9849 IEM_MC_BEGIN(4, 3);
9850 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
9851 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
9852 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
9853 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
9854 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
9855 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
9856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9857
9858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9859 IEMOP_HLP_DONE_DECODING();
9860 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
9861 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9862
9863 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
9864 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
9865 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
9866
9867 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
9868 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
9869 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
9870
9871 IEM_MC_FETCH_EFLAGS(EFlags);
9872# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)
9873# if defined(RT_ARCH_AMD64)
9874 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
9875# endif
9876 {
9877 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9878 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9879 else
9880 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9881 }
9882# if defined(RT_ARCH_AMD64)
9883 else
9884# endif
9885# endif
9886# if !defined(RT_ARCH_ARM64) /** @todo may need this for unaligned accesses... */
9887 {
9888             /* Note! The fallback for 32-bit systems and systems without CX16 is done as
9889                multiple accesses that are not all atomic, which works fine in a UNI CPU guest
9890 configuration (ignoring DMA). If guest SMP is active we have no choice
9891 but to use a rendezvous callback here. Sigh. */
9892 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
9893 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9894 else
9895 {
9896 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
9897 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
9898 }
9899 }
9900# endif
9901
9902 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
9903 IEM_MC_COMMIT_EFLAGS(EFlags);
9904 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
9905 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
9906 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
9907 IEM_MC_ENDIF();
9908 IEM_MC_ADVANCE_RIP();
9909
9910 IEM_MC_END();
9911 return VINF_SUCCESS;
9912#endif
9913 }
9914 Log(("cmpxchg16b -> #UD\n"));
9915 return IEMOP_RAISE_INVALID_OPCODE();
9916}
9917
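/*
 * A sketch of why the non-atomic fallback above is only safe for a single
 * vCPU (illustrative only; the real fallbacks are iemAImpl_cmpxchg16b_fallback
 * and the rendezvous variant): the 128-bit update is done as two 64-bit
 * stores, so another vCPU could observe or modify the line in between
 * unless everyone is halted first.
 */
#if 0
#include <stdint.h>

typedef struct CMPXCHG16BSKETCH { uint64_t Lo, Hi; } CMPXCHG16BSKETCH;

static int cmpxchg16bNonAtomic(CMPXCHG16BSKETCH volatile *pMem,
                               CMPXCHG16BSKETCH *pRaxRdx, CMPXCHG16BSKETCH const *pRbxRcx)
{
    if (pMem->Lo == pRaxRdx->Lo && pMem->Hi == pRaxRdx->Hi)
    {
        pMem->Lo = pRbxRcx->Lo;         /* Two separate stores: not atomic! */
        pMem->Hi = pRbxRcx->Hi;
        return 1;                       /* ZF=1 */
    }
    pRaxRdx->Lo = pMem->Lo;
    pRaxRdx->Hi = pMem->Hi;
    return 0;                           /* ZF=0 */
}
#endif
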
9918FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
9919{
9920 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
9921 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
9922 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
9923}
9924
9925/** Opcode 0x0f 0xc7 11/6. */
9926FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
9927
9928/** Opcode 0x0f 0xc7 !11/6. */
9929#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9930FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
9931{
9932 IEMOP_MNEMONIC(vmptrld, "vmptrld");
9933 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
9934 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
9935 IEM_MC_BEGIN(2, 0);
9936 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9937 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
9938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9939 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
9940 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9941 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
9942 IEM_MC_END();
9943 return VINF_SUCCESS;
9944}
9945#else
9946FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
9947#endif
9948
9949/** Opcode 0x66 0x0f 0xc7 !11/6. */
9950#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9951FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
9952{
9953 IEMOP_MNEMONIC(vmclear, "vmclear");
9954 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
9955 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
9956 IEM_MC_BEGIN(2, 0);
9957 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9958 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
9959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9960 IEMOP_HLP_DONE_DECODING();
9961 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9962 IEM_MC_CALL_CIMPL_2(iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
9963 IEM_MC_END();
9964 return VINF_SUCCESS;
9965}
9966#else
9967FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
9968#endif
9969
9970/** Opcode 0xf3 0x0f 0xc7 !11/6. */
9971#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9972FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
9973{
9974 IEMOP_MNEMONIC(vmxon, "vmxon");
9975 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
9976 IEM_MC_BEGIN(2, 0);
9977 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9978 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
9979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9980 IEMOP_HLP_DONE_DECODING();
9981 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9982 IEM_MC_CALL_CIMPL_2(iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
9983 IEM_MC_END();
9984 return VINF_SUCCESS;
9985}
9986#else
9987FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
9988#endif
9989
9990/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
9991#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
9992FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
9993{
9994 IEMOP_MNEMONIC(vmptrst, "vmptrst");
9995 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
9996 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
9997 IEM_MC_BEGIN(2, 0);
9998 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9999 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
10000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10001 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
10002 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
10003 IEM_MC_CALL_CIMPL_2(iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
10004 IEM_MC_END();
10005 return VINF_SUCCESS;
10006}
10007#else
10008FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
10009#endif
10010
10011/** Opcode 0x0f 0xc7 11/7. */
10012FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm);
10013
10014
10015/**
10016 * Group 9 jump table for register variant.
10017 */
10018IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
10019{ /* pfx: none, 066h, 0f3h, 0f2h */
10020 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
10021 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
10022 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
10023 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
10024 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10025 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
10026 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10027 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10028};
10029AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
10030
10031
10032/**
10033 * Group 9 jump table for memory variant.
10034 */
10035IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
10036{ /* pfx: none, 066h, 0f3h, 0f2h */
10037 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
10038 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
10039 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
10040 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
10041 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10042 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
10043 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
10044 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10045};
10046AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
10047
10048
10049/** Opcode 0x0f 0xc7. */
10050FNIEMOP_DEF(iemOp_Grp9)
10051{
10052 uint8_t bRm; IEM_OPCODE_GET_NEXT_RM(&bRm);
10053 if (IEM_IS_MODRM_REG_MODE(bRm))
10054 /* register, register */
10055 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10056 + pVCpu->iem.s.idxPrefix], bRm);
10057 /* memory, register */
10058 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10059 + pVCpu->iem.s.idxPrefix], bRm);
10060}
10061
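/*
 * How the 8x4 jump tables above are indexed, as a tiny sketch (illustrative
 * only): each ModR/M /reg value owns four consecutive slots, one per
 * mandatory prefix (none, 066h, 0f3h, 0f2h).
 */
#if 0
#include <stdint.h>

static unsigned group9TableIndex(uint8_t bRm, unsigned idxPrefix /* 0..3 */)
{
    unsigned const iReg = (bRm >> 3) & 7;   /* The ModR/M reg field. */
    return iReg * 4 + idxPrefix;            /* Row-major into the 8x4 table. */
}
#endif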
10062
10063/**
10064 * Common 'bswap register' helper.
10065 */
10066FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
10067{
10068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10069 switch (pVCpu->iem.s.enmEffOpSize)
10070 {
10071 case IEMMODE_16BIT:
10072 IEM_MC_BEGIN(1, 0);
10073 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10074 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
10075 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
10076 IEM_MC_ADVANCE_RIP();
10077 IEM_MC_END();
10078 return VINF_SUCCESS;
10079
10080 case IEMMODE_32BIT:
10081 IEM_MC_BEGIN(1, 0);
10082 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10083 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
10084 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10085 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
10086 IEM_MC_ADVANCE_RIP();
10087 IEM_MC_END();
10088 return VINF_SUCCESS;
10089
10090 case IEMMODE_64BIT:
10091 IEM_MC_BEGIN(1, 0);
10092 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10093 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
10094 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
10095 IEM_MC_ADVANCE_RIP();
10096 IEM_MC_END();
10097 return VINF_SUCCESS;
10098
10099 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10100 }
10101}
10102
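/*
 * The byte swap itself, sketched standalone (illustrative only). Note that
 * bswap with a 16-bit operand is undefined on real CPUs, which is why the
 * u16 worker above simply gets a 32-bit register reference without the
 * usual high-dword clearing.
 */
#if 0
#include <stdint.h>

static uint32_t bswapU32(uint32_t u)
{
    return (u << 24)
         | ((u << 8) & UINT32_C(0x00ff0000))
         | ((u >> 8) & UINT32_C(0x0000ff00))
         | (u >> 24);
}
#endif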
10103
10104/** Opcode 0x0f 0xc8. */
10105FNIEMOP_DEF(iemOp_bswap_rAX_r8)
10106{
10107 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
10108    /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
10109             prefix, but REX.B appears to be the correct one.  For a parallel
10110 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
10111 IEMOP_HLP_MIN_486();
10112 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
10113}
10114
10115
10116/** Opcode 0x0f 0xc9. */
10117FNIEMOP_DEF(iemOp_bswap_rCX_r9)
10118{
10119 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
10120 IEMOP_HLP_MIN_486();
10121 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
10122}
10123
10124
10125/** Opcode 0x0f 0xca. */
10126FNIEMOP_DEF(iemOp_bswap_rDX_r10)
10127{
10128    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
10129 IEMOP_HLP_MIN_486();
10130 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
10131}
10132
10133
10134/** Opcode 0x0f 0xcb. */
10135FNIEMOP_DEF(iemOp_bswap_rBX_r11)
10136{
10137    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
10138 IEMOP_HLP_MIN_486();
10139 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
10140}
10141
10142
10143/** Opcode 0x0f 0xcc. */
10144FNIEMOP_DEF(iemOp_bswap_rSP_r12)
10145{
10146 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
10147 IEMOP_HLP_MIN_486();
10148 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
10149}
10150
10151
10152/** Opcode 0x0f 0xcd. */
10153FNIEMOP_DEF(iemOp_bswap_rBP_r13)
10154{
10155 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
10156 IEMOP_HLP_MIN_486();
10157 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
10158}
10159
10160
10161/** Opcode 0x0f 0xce. */
10162FNIEMOP_DEF(iemOp_bswap_rSI_r14)
10163{
10164 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
10165 IEMOP_HLP_MIN_486();
10166 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
10167}
10168
10169
10170/** Opcode 0x0f 0xcf. */
10171FNIEMOP_DEF(iemOp_bswap_rDI_r15)
10172{
10173 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
10174 IEMOP_HLP_MIN_486();
10175 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
10176}
10177
10178
10179/* Opcode 0x0f 0xd0 - invalid */
10180/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
10181FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
10182/* Opcode 0xf3 0x0f 0xd0 - invalid */
10183/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
10184FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);
10185
10186/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
10187FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
10188{
10189    IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10190 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
10191}
10192
10193/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
10194FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
10195{
10196 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10197 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
10198}
10199
10200/* Opcode 0xf3 0x0f 0xd1 - invalid */
10201/* Opcode 0xf2 0x0f 0xd1 - invalid */
10202
10203/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
10204FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
10205{
10206 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
10207 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
10208}
10209
10210
10211/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
10212FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
10213{
10214 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10215 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
10216}
10217
10218
10219/* Opcode 0xf3 0x0f 0xd2 - invalid */
10220/* Opcode 0xf2 0x0f 0xd2 - invalid */
10221
10222/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
10223FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
10224{
10225 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10226 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
10227}
10228
10229
10230/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
10231FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
10232{
10233 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
10234 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
10235}
10236
10237
10238/* Opcode 0xf3 0x0f 0xd3 - invalid */
10239/* Opcode 0xf2 0x0f 0xd3 - invalid */
10240
10241
10242/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
10243FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
10244{
10245 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10246 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_paddq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
10247}
10248
10249
10250/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
10251FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
10252{
10253 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10254 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
10255}
10256
10257
10258/* Opcode 0xf3 0x0f 0xd4 - invalid */
10259/* Opcode 0xf2 0x0f 0xd4 - invalid */
10260
10261/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
10262FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
10263{
10264 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10265 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
10266}
10267
10268/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
10269FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
10270{
10271 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10272 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
10273}
10274
10275
10276/* Opcode 0xf3 0x0f 0xd5 - invalid */
10277/* Opcode 0xf2 0x0f 0xd5 - invalid */
10278
10279/* Opcode 0x0f 0xd6 - invalid */
10280
10281/**
10282 * @opcode 0xd6
10283 * @oppfx 0x66
10284 * @opcpuid sse2
10285 * @opgroup og_sse2_pcksclr_datamove
10286 * @opxcpttype none
10287 * @optest op1=-1 op2=2 -> op1=2
10288 * @optest op1=0 op2=-42 -> op1=-42
10289 */
10290FNIEMOP_DEF(iemOp_movq_Wq_Vq)
10291{
10292 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10293 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10294 if (IEM_IS_MODRM_REG_MODE(bRm))
10295 {
10296 /*
10297 * Register, register.
10298 */
10299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10300 IEM_MC_BEGIN(0, 2);
10301 IEM_MC_LOCAL(uint64_t, uSrc);
10302
10303 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10304 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
10305
10306 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10307 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
10308
10309 IEM_MC_ADVANCE_RIP();
10310 IEM_MC_END();
10311 }
10312 else
10313 {
10314 /*
10315 * Memory, register.
10316 */
10317 IEM_MC_BEGIN(0, 2);
10318 IEM_MC_LOCAL(uint64_t, uSrc);
10319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10320
10321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10323 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10324 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10325
10326 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10327 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10328
10329 IEM_MC_ADVANCE_RIP();
10330 IEM_MC_END();
10331 }
10332 return VINF_SUCCESS;
10333}
10334
10335
10336/**
10337 * @opcode 0xd6
10338 * @opcodesub 11 mr/reg
10339 * @oppfx f3
10340 * @opcpuid sse2
10341 * @opgroup og_sse2_simdint_datamove
10342 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
10343 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10344 */
10345FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
10346{
10347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10348 if (IEM_IS_MODRM_REG_MODE(bRm))
10349 {
10350 /*
10351 * Register, register.
10352 */
10353 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10355 IEM_MC_BEGIN(0, 1);
10356 IEM_MC_LOCAL(uint64_t, uSrc);
10357
10358 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10359 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10360
10361 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
10362 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
10363 IEM_MC_FPU_TO_MMX_MODE();
10364
10365 IEM_MC_ADVANCE_RIP();
10366 IEM_MC_END();
10367 return VINF_SUCCESS;
10368 }
10369
10370 /**
10371 * @opdone
10372 * @opmnemonic udf30fd6mem
10373 * @opcode 0xd6
10374 * @opcodesub !11 mr/reg
10375 * @oppfx f3
10376 * @opunused intel-modrm
10377 * @opcpuid sse
10378 * @optest ->
10379 */
10380 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
10381}
10382
10383
10384/**
10385 * @opcode 0xd6
10386 * @opcodesub 11 mr/reg
10387 * @oppfx f2
10388 * @opcpuid sse2
10389 * @opgroup og_sse2_simdint_datamove
10390 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
10391 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10392 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
10393 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
10394 * @optest op1=-42 op2=0xfedcba9876543210
10395 * -> op1=0xfedcba9876543210 ftw=0xff
10396 */
10397FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
10398{
10399 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10400 if (IEM_IS_MODRM_REG_MODE(bRm))
10401 {
10402 /*
10403 * Register, register.
10404 */
10405 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10407 IEM_MC_BEGIN(0, 1);
10408 IEM_MC_LOCAL(uint64_t, uSrc);
10409
10410 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10411 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10412
10413 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
10414 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
10415 IEM_MC_FPU_TO_MMX_MODE();
10416
10417 IEM_MC_ADVANCE_RIP();
10418 IEM_MC_END();
10419 return VINF_SUCCESS;
10420 }
10421
10422 /**
10423 * @opdone
10424 * @opmnemonic udf20fd6mem
10425 * @opcode 0xd6
10426 * @opcodesub !11 mr/reg
10427 * @oppfx f2
10428 * @opunused intel-modrm
10429 * @opcpuid sse
10430 * @optest ->
10431 */
10432 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
10433}
10434
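/*
 * The ftw=0xff in the @optest annotations above comes from the
 * IEM_MC_FPU_TO_MMX_MODE() calls: any MMX-register write switches the x87
 * unit to MMX mode. A sketch of that state change (field names are
 * illustrative, not the real FXSAVE layout):
 */
#if 0
#include <stdint.h>

typedef struct X87SKETCH { uint8_t fFtw; uint16_t fFsw; } X87SKETCH;

static void fpuToMmxModeSketch(X87SKETCH *pState)
{
    pState->fFtw  = 0xff;                   /* All eight registers tagged valid. */
    pState->fFsw &= ~(UINT16_C(7) << 11);   /* Stack TOP = 0. */
}
#endif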
10435
10436/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
10437FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
10438{
10439 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10440    /* Docs say register only. */
10441 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
10442 {
10443        /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
10444 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS, 0);
10445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10446 IEM_MC_BEGIN(2, 0);
10447 IEM_MC_ARG(uint64_t *, puDst, 0);
10448 IEM_MC_ARG(uint64_t const *, puSrc, 1);
10449 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
10450 IEM_MC_PREPARE_FPU_USAGE();
10451 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
10452 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
10453 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
10454 IEM_MC_FPU_TO_MMX_MODE();
10455 IEM_MC_ADVANCE_RIP();
10456 IEM_MC_END();
10457 return VINF_SUCCESS;
10458 }
10459 return IEMOP_RAISE_INVALID_OPCODE();
10460}
10461
10462
10463/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
10464FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
10465{
10466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10467    /* Docs say register only. */
10468 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
10469 {
10470        /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
10471 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
10472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10473 IEM_MC_BEGIN(2, 0);
10474 IEM_MC_ARG(uint64_t *, puDst, 0);
10475 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
10476 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10477 IEM_MC_PREPARE_SSE_USAGE();
10478 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
10479 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
10480 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
10481 IEM_MC_ADVANCE_RIP();
10482 IEM_MC_END();
10483 return VINF_SUCCESS;
10484 }
10485 return IEMOP_RAISE_INVALID_OPCODE();
10486}
10487
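/*
 * What pmovmskb computes, as a standalone sketch (illustrative only): the
 * most significant bit of each source byte is gathered into the low bits of
 * the destination, 8 bits for the MMX form and 16 for the SSE form, with
 * the result zero extended (the workers above take a 64-bit GREG reference
 * and handle the high half themselves).
 */
#if 0
#include <stdint.h>

static uint64_t pmovmskbU64(uint64_t uSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask;
}
#endif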
10488
10489/* Opcode 0xf3 0x0f 0xd7 - invalid */
10490/* Opcode 0xf2 0x0f 0xd7 - invalid */
10491
10492
10493/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
10494FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
10495{
10496 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10497 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
10498}
10499
10500
10501/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
10502FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
10503{
10504 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10505 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
10506}
10507
10508
10509/* Opcode 0xf3 0x0f 0xd8 - invalid */
10510/* Opcode 0xf2 0x0f 0xd8 - invalid */
10511
10512/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
10513FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
10514{
10515 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10516 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
10517}
10518
10519
10520/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
10521FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
10522{
10523 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10524 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
10525}
10526
10527
10528/* Opcode 0xf3 0x0f 0xd9 - invalid */
10529/* Opcode 0xf2 0x0f 0xd9 - invalid */
10530
10531/** Opcode 0x0f 0xda - pminub Pq, Qq */
10532FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
10533{
10534 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10535 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
10536}
10537
10538
10539/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
10540FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
10541{
10542 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10543 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
10544}
10545
10546/* Opcode 0xf3 0x0f 0xda - invalid */
10547/* Opcode 0xf2 0x0f 0xda - invalid */
10548
10549/** Opcode 0x0f 0xdb - pand Pq, Qq */
10550FNIEMOP_DEF(iemOp_pand_Pq_Qq)
10551{
10552 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10553 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
10554}
10555
10556
10557/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
10558FNIEMOP_DEF(iemOp_pand_Vx_Wx)
10559{
10560 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10561 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
10562}
10563
10564
10565/* Opcode 0xf3 0x0f 0xdb - invalid */
10566/* Opcode 0xf2 0x0f 0xdb - invalid */
10567
10568/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
10569FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
10570{
10571 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10572 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
10573}
10574
10575
10576/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
10577FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
10578{
10579 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10580 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
10581}
10582
10583
10584/* Opcode 0xf3 0x0f 0xdc - invalid */
10585/* Opcode 0xf2 0x0f 0xdc - invalid */
10586
10587/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
10588FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
10589{
10590 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10591 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
10592}
10593
10594
10595/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
10596FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
10597{
10598 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10599 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
10600}
10601
10602
10603/* Opcode 0xf3 0x0f 0xdd - invalid */
10604/* Opcode 0xf2 0x0f 0xdd - invalid */
10605
10606/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
10607FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
10608{
10609 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10610 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
10611}
10612
10613
10614/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
10615FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
10616{
10617 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10618 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
10619}
10620
10621/* Opcode 0xf3 0x0f 0xde - invalid */
10622/* Opcode 0xf2 0x0f 0xde - invalid */
10623
10624
10625/** Opcode 0x0f 0xdf - pandn Pq, Qq */
10626FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
10627{
10628 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10629 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
10630}
10631
10632
10633/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
10634FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
10635{
10636 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10637 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
10638}
10639
10640
10641/* Opcode 0xf3 0x0f 0xdf - invalid */
10642/* Opcode 0xf2 0x0f 0xdf - invalid */
10643
10644/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
10645FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
10646{
10647 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10648 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
10649}
10650
10651
10652/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
10653FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
10654{
10655 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10656 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
10657}
10658
10659
10660/* Opcode 0xf3 0x0f 0xe0 - invalid */
10661/* Opcode 0xf2 0x0f 0xe0 - invalid */
10662
10663/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
10664FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
10665{
10666 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10667 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
10668}
10669
10670
10671/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
10672FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
10673{
10674 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10675 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
10676}
10677
10678
10679/* Opcode 0xf3 0x0f 0xe1 - invalid */
10680/* Opcode 0xf2 0x0f 0xe1 - invalid */
10681
10682/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
10683FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
10684{
10685 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
10686 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
10687}
10688
10689
10690/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
10691FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
10692{
10693 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10694 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
10695}
10696
10697
10698/* Opcode 0xf3 0x0f 0xe2 - invalid */
10699/* Opcode 0xf2 0x0f 0xe2 - invalid */
10700
10701/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
10702FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
10703{
10704 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10705 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
10706}
10707
10708
10709/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
10710FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
10711{
10712 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10713 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
10714}
10715
10716
10717/* Opcode 0xf3 0x0f 0xe3 - invalid */
10718/* Opcode 0xf2 0x0f 0xe3 - invalid */
10719
10720/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
10721FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
10722{
10723 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10724 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
10725}
10726
10727
10728/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
10729FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
10730{
10731 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10732 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
10733}
10734
10735
10736/* Opcode 0xf3 0x0f 0xe4 - invalid */
10737/* Opcode 0xf2 0x0f 0xe4 - invalid */
10738
10739/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
10740FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
10741{
10742 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10743 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
10744}
10745
10746
10747/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
10748FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
10749{
10750 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10751 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
10752}
10753
10754
10755/* Opcode 0xf3 0x0f 0xe5 - invalid */
10756/* Opcode 0xf2 0x0f 0xe5 - invalid */
10757
10758/* Opcode 0x0f 0xe6 - invalid */
10759/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
10760FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
10761/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
10762FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
10763/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
10764FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
10765
10766
10767/**
10768 * @opcode 0xe7
10769 * @opcodesub !11 mr/reg
10770 * @oppfx none
10771 * @opcpuid sse
10772 * @opgroup og_sse1_cachect
10773 * @opxcpttype none
10774 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
10775 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
10776 */
10777FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
10778{
10779 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10780 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10781 if (IEM_IS_MODRM_MEM_MODE(bRm))
10782 {
10783 /* Register, memory. */
10784 IEM_MC_BEGIN(0, 2);
10785 IEM_MC_LOCAL(uint64_t, uSrc);
10786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10787
10788 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10790 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
10791 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10792
10793 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
10794 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10795 IEM_MC_FPU_TO_MMX_MODE();
10796
10797 IEM_MC_ADVANCE_RIP();
10798 IEM_MC_END();
10799 return VINF_SUCCESS;
10800 }
10801 /**
10802 * @opdone
10803 * @opmnemonic ud0fe7reg
10804 * @opcode 0xe7
10805 * @opcodesub 11 mr/reg
10806 * @oppfx none
10807 * @opunused immediate
10808 * @opcpuid sse
10809 * @optest ->
10810 */
10811 return IEMOP_RAISE_INVALID_OPCODE();
10812}
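/* The non-temporal hint only affects caching, not architectural state,
   so the emulation above is an ordinary 64-bit store.  A sketch of
   guest-side code that compiles to movntq (illustrative only, not part
   of IEM):
   @code
   #include <xmmintrin.h>
   void StreamStore64(__m64 *pDst, __m64 uSrc)
   {
       _mm_stream_pi(pDst, uSrc); // emits: movntq [pDst], mm
   }
   @endcode */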
10813
10814/**
10815 * @opcode 0xe7
10816 * @opcodesub !11 mr/reg
10817 * @oppfx 0x66
10818 * @opcpuid sse2
10819 * @opgroup og_sse2_cachect
10820 * @opxcpttype 1
10821 * @optest op1=-1 op2=2 -> op1=2
10822 * @optest op1=0 op2=-42 -> op1=-42
10823 */
10824FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
10825{
10826 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
10827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10828 if (IEM_IS_MODRM_MEM_MODE(bRm))
10829 {
10830 /* Register, memory. */
10831 IEM_MC_BEGIN(0, 2);
10832 IEM_MC_LOCAL(RTUINT128U, uSrc);
10833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10834
10835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10837 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
10838 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
10839
10840 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
10841 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
10842
10843 IEM_MC_ADVANCE_RIP();
10844 IEM_MC_END();
10845 return VINF_SUCCESS;
10846 }
10847
10848 /**
10849 * @opdone
10850 * @opmnemonic ud660fe7reg
10851 * @opcode 0xe7
10852 * @opcodesub 11 mr/reg
10853 * @oppfx 0x66
10854 * @opunused immediate
10855 * @opcpuid sse2
10856 * @optest ->
10857 */
10858 return IEMOP_RAISE_INVALID_OPCODE();
10859}
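/* Unlike movntq above, movntdq requires a 16-byte aligned memory
   operand; IEM_MC_STORE_MEM_U128_ALIGN_SSE applies the SSE alignment
   check and raises #GP on a misaligned address.  A sketch of guest-side
   code that compiles to movntdq (illustrative only, not part of IEM):
   @code
   #include <emmintrin.h>
   void StreamStore128(__m128i *pDst, __m128i uSrc) // pDst: 16-byte aligned
   {
       _mm_stream_si128(pDst, uSrc); // emits: movntdq [pDst], xmm
   }
   @endcode */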
10860
10861/* Opcode 0xf3 0x0f 0xe7 - invalid */
10862/* Opcode 0xf2 0x0f 0xe7 - invalid */
10863
10864
10865/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
10866FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
10867{
10868 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10869 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
10870}
10871
10872
10873/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
10874FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
10875{
10876 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10877 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
10878}
10879
10880
10881/* Opcode 0xf3 0x0f 0xe8 - invalid */
10882/* Opcode 0xf2 0x0f 0xe8 - invalid */
10883
10884/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
10885FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
10886{
10887 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10888 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
10889}
10890
10891
10892/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
10893FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
10894{
10895 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10896 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
10897}
10898
10899
10900/* Opcode 0xf3 0x0f 0xe9 - invalid */
10901/* Opcode 0xf2 0x0f 0xe9 - invalid */
10902
10903
10904/** Opcode 0x0f 0xea - pminsw Pq, Qq */
10905FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
10906{
10907 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10908 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
10909}
10910
10911
10912/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
10913FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
10914{
10915 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10916 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
10917}
10918
10919
10920/* Opcode 0xf3 0x0f 0xea - invalid */
10921/* Opcode 0xf2 0x0f 0xea - invalid */
10922
10923
10924/** Opcode 0x0f 0xeb - por Pq, Qq */
10925FNIEMOP_DEF(iemOp_por_Pq_Qq)
10926{
10927 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10928 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
10929}
10930
10931
10932/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
10933FNIEMOP_DEF(iemOp_por_Vx_Wx)
10934{
10935 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10936 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
10937}
10938
10939
10940/* Opcode 0xf3 0x0f 0xeb - invalid */
10941/* Opcode 0xf2 0x0f 0xeb - invalid */
10942
10943/** Opcode 0x0f 0xec - paddsb Pq, Qq */
10944FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
10945{
10946 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10947 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
10948}
10949
10950
10951/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
10952FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
10953{
10954 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10955 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
10956}
10957
10958
10959/* Opcode 0xf3 0x0f 0xec - invalid */
10960/* Opcode 0xf2 0x0f 0xec - invalid */
10961
10962/** Opcode 0x0f 0xed - paddsw Pq, Qq */
10963FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
10964{
10965 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10966 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
10967}
10968
10969
10970/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
10971FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
10972{
10973 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10974 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
10975}
10976
10977
10978/* Opcode 0xf3 0x0f 0xed - invalid */
10979/* Opcode 0xf2 0x0f 0xed - invalid */
10980
10981
10982/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
10983FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
10984{
10985 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10986 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
10987}
10988
10989
10990/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
10991FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
10992{
10993 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
10994 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
10995}
10996
10997
10998/* Opcode 0xf3 0x0f 0xee - invalid */
10999/* Opcode 0xf2 0x0f 0xee - invalid */
11000
11001
11002/** Opcode 0x0f 0xef - pxor Pq, Qq */
11003FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
11004{
11005 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11006 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
11007}
11008
11009
11010/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
11011FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
11012{
11013 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11014 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
11015}
11016
11017
11018/* Opcode 0xf3 0x0f 0xef - invalid */
11019/* Opcode 0xf2 0x0f 0xef - invalid */
11020
11021/* Opcode 0x0f 0xf0 - invalid */
11022/* Opcode 0x66 0x0f 0xf0 - invalid */
11023/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
11024FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);
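/* For reference while this remains a stub: lddqu (SSE3) performs an
   unaligned 128-bit load like movdqu; the source is memory-only (Mx),
   so the register form is invalid. */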
11025
11026
11027/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
11028FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
11029{
11030 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
11031 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
11032}
11033
11034
11035/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
11036FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
11037{
11038 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11039 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
11040}
11041
11042
11043/* Opcode 0xf2 0x0f 0xf1 - invalid */
11044
11045/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
11046FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
11047{
11048 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
11049 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
11050}
11051
11052
11053/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
11054FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
11055{
11056 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11057 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
11058}
11059
11060
11061/* Opcode 0xf2 0x0f 0xf2 - invalid */
11062
11063/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
11064FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
11065{
11066 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
11067 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
11068}
11069
11070
11071/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
11072FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
11073{
11074 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11075 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
11076}
11077
11078/* Opcode 0xf2 0x0f 0xf3 - invalid */
11079
11080/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
11081FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
11082{
11083 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11084 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
11085}
11086
11087
11088/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
11089FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
11090{
11091 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11092 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
11093}
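/* Note: pmuludq multiplies the low 32-bit lane of each 64-bit element
   into a full 64-bit product.  Both encodings were introduced with SSE2,
   which is why even the MMX-register form above carries DISOPTYPE_SSE. */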
11094
11095
11096/* Opcode 0xf2 0x0f 0xf4 - invalid */
11097
11098/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
11099FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
11100{
11101 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, 0);
11102 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
11103}
11104
11105
11106/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
11107FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
11108{
11109 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
11110 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
11111}
11112
11113/* Opcode 0xf2 0x0f 0xf5 - invalid */
11114
11115/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
11116FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
11117{
11118 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_MMX, IEMOPHINT_IGNORES_OP_SIZES);
11119 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
11120}
11121
11122
11123/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
11124FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
11125{
11126 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
11127 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
11128}
11129
11130
11131/* Opcode 0xf2 0x0f 0xf6 - invalid */
11132
11133/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
11134FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
11135/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
11136FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
11137/* Opcode 0xf2 0x0f 0xf7 - invalid */
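/* For reference while these remain stubs: maskmovq and maskmovdqu do a
   byte-masked, non-temporal store of the first operand to [rDI], using
   the most significant bit of each byte in the second (register-only)
   operand as the write mask. */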
11138
11139
11140/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
11141FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
11142{
11143 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11144 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
11145}
11146
11147
11148/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
11149FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
11150{
11151 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11152 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
11153}
11154
11155
11156/* Opcode 0xf2 0x0f 0xf8 - invalid */
11157
11158
11159/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
11160FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
11161{
11162 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11163 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
11164}
11165
11166
11167/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
11168FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
11169{
11170 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11171 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
11172}
11173
11174
11175/* Opcode 0xf2 0x0f 0xf9 - invalid */
11176
11177
11178/** Opcode 0x0f 0xfa - psubd Pq, Qq */
11179FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
11180{
11181 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11182 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
11183}
11184
11185
11186/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
11187FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
11188{
11189 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11190 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
11191}
11192
11193
11194/* Opcode 0xf2 0x0f 0xfa - invalid */
11195
11196
11197/** Opcode 0x0f 0xfb - psubq Pq, Qq */
11198FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
11199{
11200 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11201 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex, iemAImpl_psubq_u64, IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2);
11202}
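/* Note: the _Ex worker takes an extra CPUID gate because psubq on MMX
   registers was introduced with SSE2, unlike the baseline MMX arithmetic
   ops above which only require MMX. */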
11203
11204
11205/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
11206FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
11207{
11208 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11209 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
11210}
11211
11212
11213/* Opcode 0xf2 0x0f 0xfb - invalid */
11214
11215
11216/** Opcode 0x0f 0xfc - paddb Pq, Qq */
11217FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
11218{
11219 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11220 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
11221}
11222
11223
11224/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
11225FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
11226{
11227 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11228 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
11229}
11230
11231
11232/* Opcode 0xf2 0x0f 0xfc - invalid */
11233
11234
11235/** Opcode 0x0f 0xfd - paddw Pq, Qq */
11236FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
11237{
11238 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11239 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
11240}
11241
11242
11243/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
11244FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
11245{
11246 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11247 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
11248}
11249
11250
11251/* Opcode 0xf2 0x0f 0xfd - invalid */
11252
11253
11254/** Opcode 0x0f 0xfe - paddd Pq, Qq */
11255FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
11256{
11257 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11258 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
11259}
11260
11261
11262/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
11263FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
11264{
11265 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
11266 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
11267}
11268
11269
11270/* Opcode 0xf2 0x0f 0xfe - invalid */
11271
11272
11273/** Opcode 0x0f 0xff - UD0 (all prefix variants) */
11274FNIEMOP_DEF(iemOp_ud0)
11275{
11276 IEMOP_MNEMONIC(ud0, "ud0");
11277 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
11278 {
11279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
11280#ifndef TST_IEM_CHECK_MC
11281 if (IEM_IS_MODRM_MEM_MODE(bRm))
11282 {
11283 RTGCPTR GCPtrEff;
11284 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
11285 if (rcStrict != VINF_SUCCESS)
11286 return rcStrict;
11287 }
11288#endif
11289 IEMOP_HLP_DONE_DECODING();
11290 }
11291 return IEMOP_RAISE_INVALID_OPCODE();
11292}
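/* Note: on Intel CPUs ud0 consumes a ModR/M byte (plus any SIB and
   displacement), so the instruction length reported with the #UD covers
   the operand bytes, while other vendors fault on the bare two-byte
   opcode, which is what the vendor check above models.  E.g. the
   sequence 0F FF 84 24 12 34 56 78 decodes as an 8-byte ud0 with a
   SIB+disp32 operand on Intel. */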
11293
11294
11295
11296/**
11297 * Two byte opcode map, first byte 0x0f.
11298 *
11299 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
11300 * check if it needs updating as well when making changes.
11301 */
11302IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
11303{
11304 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
11305 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
11306 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
11307 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
11308 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
11309 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
11310 /* 0x05 */ IEMOP_X4(iemOp_syscall),
11311 /* 0x06 */ IEMOP_X4(iemOp_clts),
11312 /* 0x07 */ IEMOP_X4(iemOp_sysret),
11313 /* 0x08 */ IEMOP_X4(iemOp_invd),
11314 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
11315 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
11316 /* 0x0b */ IEMOP_X4(iemOp_ud2),
11317 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
11318 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
11319 /* 0x0e */ IEMOP_X4(iemOp_femms),
11320 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
11321
11322 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
11323 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
11324 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
11325 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11326 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11327 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11328 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
11329 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11330 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
11331 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
11332 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
11333 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
11334 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
11335 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
11336 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
11337 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
11338
11339 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
11340 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
11341 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
11342 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
11343 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
11344 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
11345 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
11346 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
11347 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11348 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11349 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
11350 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11351 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
11352 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
11353 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11354 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11355
11356 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
11357 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
11358 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
11359 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
11360 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
11361 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
11362 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
11363 /* 0x37 */ IEMOP_X4(iemOp_getsec),
11364 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
11365 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
11366 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
11367 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
11368 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
11369 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
11370 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
11371 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
11372
11373 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
11374 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
11375 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
11376 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
11377 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
11378 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
11379 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
11380 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
11381 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
11382 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
11383 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
11384 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
11385 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
11386 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
11387 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
11388 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
11389
11390 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11391 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
11392 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
11393 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
11394 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11395 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11396 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11397 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11398 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
11399 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
11400 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
11401 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
11402 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
11403 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
11404 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
11405 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
11406
11407 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11408 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11409 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11410 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11411 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11412 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11413 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11414 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11415 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11416 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11417 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11418 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11419 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11420 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11421 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11422 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
11423
11424 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
11425 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
11426 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
11427 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
11428 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11429 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11430 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11431 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11432
11433 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11434 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11435 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11436 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11437 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
11438 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
11439 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
11440 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
11441
11442 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
11443 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
11444 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
11445 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
11446 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
11447 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
11448 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
11449 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
11450 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
11451 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
11452 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
11453 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
11454 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
11455 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
11456 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
11457 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
11458
11459 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
11460 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
11461 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
11462 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
11463 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
11464 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
11465 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
11466 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
11467 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
11468 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
11469 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
11470 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
11471 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
11472 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
11473 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
11474 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
11475
11476 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
11477 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
11478 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
11479 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
11480 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
11481 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
11482 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
11483 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
11484 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
11485 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
11486 /* 0xaa */ IEMOP_X4(iemOp_rsm),
11487 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
11488 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
11489 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
11490 /* 0xae */ IEMOP_X4(iemOp_Grp15),
11491 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
11492
11493 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
11494 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
11495 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
11496 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
11497 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
11498 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
11499 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
11500 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
11501 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
11502 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
11503 /* 0xba */ IEMOP_X4(iemOp_Grp8),
11504 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
11505 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
11506 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
11507 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
11508 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
11509
11510 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
11511 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
11512 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
11513 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11514 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
11515 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
11516 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
11517 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
11518 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
11519 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
11520 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
11521 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
11522 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
11523 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
11524 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
11525 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
11526
11527 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
11528 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11529 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11530 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11531 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11532 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11533 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
11534 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11535 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11536 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11537 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11538 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11539 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11540 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11541 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11542 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11543
11544 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11545 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11546 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11547 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11548 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11549 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11550 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
11551 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11552 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11553 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11554 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11555 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11556 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11557 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11558 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11559 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11560
11561 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
11562 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11563 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11564 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11565 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11566 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11567 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11568 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11569 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11570 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11571 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11572 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11573 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11574 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11575 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
11576 /* 0xff */ IEMOP_X4(iemOp_ud0),
11577};
11578AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
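/* Table layout: 256 opcodes x 4 mandatory-prefix columns (none, 066h,
   0f3h, 0f2h) = 1024 entries, as asserted above; IEMOP_X4 fills all four
   columns of a row with the same handler.  A sketch of the expected
   lookup, assuming a prefix index using that same column order
   (illustrative only):
   @code
   // b = second opcode byte, idxPrefix in {0,1,2,3} per the columns above
   return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + idxPrefix]);
   @endcode */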
11579
11580/** @} */
11581