VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsThree0f38.cpp.h@ 96034

Last change on this file since 96034 was 96034, checked in by vboxsync, 2 years ago

VMM/IEM: Implement [v]psign{b,w,d} instructions, bugref:9898

1/* $Id: IEMAllInstructionsThree0f38.cpp.h 96034 2022-08-04 20:00:12Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap2.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name Three byte opcodes with first two bytes 0x0f 0x38
23 * @{
24 */
25
26FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported); /* in IEMAllInstructionsTwoByte0f.cpp.h */
27
28
29/**
30 * Common worker for SSSE3 instructions on the forms:
31 * pxxx xmm1, xmm2/mem128
32 *
33 * Proper alignment of the 128-bit operand is enforced.
34 * Exceptions type 4. SSSE3 cpuid checks.
35 *
36 * @sa iemOpCommonSse2_FullFull_To_Full
37 */
38FNIEMOP_DEF_1(iemOpCommonSsse3_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
39{
40 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
41 if (IEM_IS_MODRM_REG_MODE(bRm))
42 {
43 /*
44 * Register, register.
45 */
46 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
47 IEM_MC_BEGIN(2, 0);
48 IEM_MC_ARG(PRTUINT128U, puDst, 0);
49 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
50 IEM_MC_MAYBE_RAISE_SSSE3_RELATED_XCPT();
51 IEM_MC_PREPARE_SSE_USAGE();
52 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
53 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
54 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
55 IEM_MC_ADVANCE_RIP();
56 IEM_MC_END();
57 }
58 else
59 {
60 /*
61 * Register, memory.
62 */
63 IEM_MC_BEGIN(2, 2);
64 IEM_MC_ARG(PRTUINT128U, puDst, 0);
65 IEM_MC_LOCAL(RTUINT128U, uSrc);
66 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
67 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
68
69 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
70 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
71 IEM_MC_MAYBE_RAISE_SSSE3_RELATED_XCPT();
72 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
73
74 IEM_MC_PREPARE_SSE_USAGE();
75 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
76 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
77
78 IEM_MC_ADVANCE_RIP();
79 IEM_MC_END();
80 }
81 return VINF_SUCCESS;
82}
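/*
 * Note: the *_FullFull_To_Full workers below follow the same shape as the
 * SSSE3 one above: decode the ModR/M byte, raise #UD if the required CPUID
 * feature is missing, prepare the SSE state, and then invoke the supplied
 * pfnU128 worker either on two XMM register references or on a register
 * reference plus a 128-bit value fetched with an alignment-checking memory
 * access.  Only the CPUID check macro and the call macro differ.
 */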
83
84
85/**
86 * Common worker for SSE4.1 instructions on the forms:
87 * pxxx xmm1, xmm2/mem128
88 *
89 * Proper alignment of the 128-bit operand is enforced.
90 * Exceptions type 4. SSE4.1 cpuid checks.
91 *
92 * @sa iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
93 * iemOpCommonSse42_FullFull_To_Full
94 */
95FNIEMOP_DEF_1(iemOpCommonSse41_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
96{
97 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
98 if (IEM_IS_MODRM_REG_MODE(bRm))
99 {
100 /*
101 * Register, register.
102 */
103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
104 IEM_MC_BEGIN(2, 0);
105 IEM_MC_ARG(PRTUINT128U, puDst, 0);
106 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
107 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
108 IEM_MC_PREPARE_SSE_USAGE();
109 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
110 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
111 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
112 IEM_MC_ADVANCE_RIP();
113 IEM_MC_END();
114 }
115 else
116 {
117 /*
118 * Register, memory.
119 */
120 IEM_MC_BEGIN(2, 2);
121 IEM_MC_ARG(PRTUINT128U, puDst, 0);
122 IEM_MC_LOCAL(RTUINT128U, uSrc);
123 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
125
126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
128 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
129 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
130
131 IEM_MC_PREPARE_SSE_USAGE();
132 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
133 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
134
135 IEM_MC_ADVANCE_RIP();
136 IEM_MC_END();
137 }
138 return VINF_SUCCESS;
139}
140
141
142/**
143 * Common worker for SSE4.1 instructions on the forms:
144 * pxxx xmm1, xmm2/mem128
145 *
146 * Proper alignment of the 128-bit operand is enforced.
147 * Exceptions type 4. SSE4.1 cpuid checks.
148 *
149 * Unlike iemOpCommonSse41_FullFull_To_Full, the @a pfnU128 worker function
150 * takes no FXSAVE state, just the operands.
151 *
152 * @sa iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
153 * iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse42_FullFull_To_Full
154 */
155FNIEMOP_DEF_1(iemOpCommonSse41Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
156{
157 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
158 if (IEM_IS_MODRM_REG_MODE(bRm))
159 {
160 /*
161 * Register, register.
162 */
163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
164 IEM_MC_BEGIN(2, 0);
165 IEM_MC_ARG(PRTUINT128U, puDst, 0);
166 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
167 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
168 IEM_MC_PREPARE_SSE_USAGE();
169 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
170 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
171 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
172 IEM_MC_ADVANCE_RIP();
173 IEM_MC_END();
174 }
175 else
176 {
177 /*
178 * Register, memory.
179 */
180 IEM_MC_BEGIN(2, 2);
181 IEM_MC_ARG(PRTUINT128U, puDst, 0);
182 IEM_MC_LOCAL(RTUINT128U, uSrc);
183 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
185
186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
188 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
189 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
190
191 IEM_MC_PREPARE_SSE_USAGE();
192 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
193 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
194
195 IEM_MC_ADVANCE_RIP();
196 IEM_MC_END();
197 }
198 return VINF_SUCCESS;
199}
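/*
 * The 'Opt' variant above differs from iemOpCommonSse41_FullFull_To_Full only
 * in the call macro: IEM_MC_CALL_VOID_AIMPL_2 hands the worker just the two
 * operand pointers, whereas IEM_MC_CALL_SSE_AIMPL_2 workers also receive the
 * FXSAVE/SSE state (see the doc comment above).
 */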
200
201
202/**
203 * Common worker for SSE4.2 instructions on the forms:
204 * pxxx xmm1, xmm2/mem128
205 *
206 * Proper alignment of the 128-bit operand is enforced.
207 * Exceptions type 4. SSE4.2 cpuid checks.
208 *
209 * @sa iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
210 * iemOpCommonSse41_FullFull_To_Full
211 */
212FNIEMOP_DEF_1(iemOpCommonSse42_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
213{
214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
215 if (IEM_IS_MODRM_REG_MODE(bRm))
216 {
217 /*
218 * Register, register.
219 */
220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
221 IEM_MC_BEGIN(2, 0);
222 IEM_MC_ARG(PRTUINT128U, puDst, 0);
223 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
224 IEM_MC_MAYBE_RAISE_SSE42_RELATED_XCPT();
225 IEM_MC_PREPARE_SSE_USAGE();
226 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
227 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
228 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
229 IEM_MC_ADVANCE_RIP();
230 IEM_MC_END();
231 }
232 else
233 {
234 /*
235 * Register, memory.
236 */
237 IEM_MC_BEGIN(2, 2);
238 IEM_MC_ARG(PRTUINT128U, puDst, 0);
239 IEM_MC_LOCAL(RTUINT128U, uSrc);
240 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
242
243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
245 IEM_MC_MAYBE_RAISE_SSE42_RELATED_XCPT();
246 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
247
248 IEM_MC_PREPARE_SSE_USAGE();
249 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
250 IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
251
252 IEM_MC_ADVANCE_RIP();
253 IEM_MC_END();
254 }
255 return VINF_SUCCESS;
256}
257
258
259/** Opcode 0x0f 0x38 0x00. */
260FNIEMOP_DEF(iemOp_pshufb_Pq_Qq)
261{
262 IEMOP_MNEMONIC2(RM, PSHUFB, pshufb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
263 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
264 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pshufb_u64, &iemAImpl_pshufb_u64_fallback),
265 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
266}
267
268
269/** Opcode 0x66 0x0f 0x38 0x00. */
270FNIEMOP_DEF(iemOp_pshufb_Vx_Wx)
271{
272 IEMOP_MNEMONIC2(RM, PSHUFB, pshufb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
273 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
274 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pshufb_u128, iemAImpl_pshufb_u128_fallback));
275
276}
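/*
 * Rough per-byte sketch of what the pshufb workers compute (illustrative
 * only; the real work is done by iemAImpl_pshufb_u64/u128 and their
 * fallbacks):
 *     uDst[i] = (uSrc[i] & 0x80) ? 0 : uDstOrig[uSrc[i] & 0x0f];
 * The MMX (64-bit) form masks the index with 0x07 instead of 0x0f.
 */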
277
278
279/** Opcode 0x0f 0x38 0x01. */
280FNIEMOP_STUB(iemOp_phaddw_Pq_Qq);
281/** Opcode 0x66 0x0f 0x38 0x01. */
282FNIEMOP_STUB(iemOp_phaddw_Vx_Wx);
283/** Opcode 0x0f 0x38 0x02. */
284FNIEMOP_STUB(iemOp_phaddd_Pq_Qq);
285/** Opcode 0x66 0x0f 0x38 0x02. */
286FNIEMOP_STUB(iemOp_phaddd_Vx_Wx);
287/** Opcode 0x0f 0x38 0x03. */
288FNIEMOP_STUB(iemOp_phaddsw_Pq_Qq);
289/** Opcode 0x66 0x0f 0x38 0x03. */
290FNIEMOP_STUB(iemOp_phaddsw_Vx_Wx);
291/** Opcode 0x0f 0x38 0x04. */
292FNIEMOP_STUB(iemOp_pmaddubsw_Pq_Qq);
293/** Opcode 0x66 0x0f 0x38 0x04. */
294FNIEMOP_STUB(iemOp_pmaddubsw_Vx_Wx);
295/** Opcode 0x0f 0x38 0x05. */
296FNIEMOP_STUB(iemOp_phsubw_Pq_Qq);
297/** Opcode 0x66 0x0f 0x38 0x05. */
298FNIEMOP_STUB(iemOp_phsubw_Vx_Wx);
299/** Opcode 0x0f 0x38 0x06. */
300FNIEMOP_STUB(iemOp_phsubd_Pq_Qq);
301/** Opcode 0x66 0x0f 0x38 0x06. */
302FNIEMOP_STUB(iemOp_phsubdq_Vx_Wx);
303/** Opcode 0x0f 0x38 0x07. */
304FNIEMOP_STUB(iemOp_phsubsw_Pq_Qq);
305/** Opcode 0x66 0x0f 0x38 0x07. */
306FNIEMOP_STUB(iemOp_phsubsw_Vx_Wx);
307
308
309/** Opcode 0x0f 0x38 0x08. */
310FNIEMOP_DEF(iemOp_psignb_Pq_Qq)
311{
312 IEMOP_MNEMONIC2(RM, PSIGNB, psignb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
313 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
314 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignb_u64, &iemAImpl_psignb_u64_fallback),
315 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
316}
317
318
319/** Opcode 0x66 0x0f 0x38 0x08. */
320FNIEMOP_DEF(iemOp_psignb_Vx_Wx)
321{
322 IEMOP_MNEMONIC2(RM, PSIGNB, psignb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
323 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
324 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignb_u128, iemAImpl_psignb_u128_fallback));
325
326}
327
328
329/** Opcode 0x0f 0x38 0x09. */
330FNIEMOP_DEF(iemOp_psignw_Pq_Qq)
331{
332 IEMOP_MNEMONIC2(RM, PSIGNW, psignw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
333 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
334 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignw_u64, &iemAImpl_psignw_u64_fallback),
335 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
336}
337
338
339/** Opcode 0x66 0x0f 0x38 0x09. */
340FNIEMOP_DEF(iemOp_psignw_Vx_Wx)
341{
342 IEMOP_MNEMONIC2(RM, PSIGNW, psignw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
343 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
344 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignw_u128, iemAImpl_psignw_u128_fallback));
345
346}
347
348
349/** Opcode 0x0f 0x38 0x0a. */
350FNIEMOP_DEF(iemOp_psignd_Pq_Qq)
351{
352 IEMOP_MNEMONIC2(RM, PSIGND, psignd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
353 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
354 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignd_u64, &iemAImpl_psignd_u64_fallback),
355 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
356}
357
358
359/** Opcode 0x66 0x0f 0x38 0x0a. */
360FNIEMOP_DEF(iemOp_psignd_Vx_Wx)
361{
362 IEMOP_MNEMONIC2(RM, PSIGND, psignd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
363 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
364 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignd_u128, iemAImpl_psignd_u128_fallback));
365
366}
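/*
 * Rough per-element sketch of the psign{b,w,d} operation (illustrative only;
 * the real work is in the iemAImpl_psign*_u64/u128 workers selected above):
 *     uDst[i] = (iSrc[i] < 0) ? -uDst[i] : (iSrc[i] == 0 ? 0 : uDst[i]);
 */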
367
368
369/** Opcode 0x0f 0x38 0x0b. */
370FNIEMOP_STUB(iemOp_pmulhrsw_Pq_Qq);
371/** Opcode 0x66 0x0f 0x38 0x0b. */
372FNIEMOP_STUB(iemOp_pmulhrsw_Vx_Wx);
373/* Opcode 0x0f 0x38 0x0c - invalid. */
374/* Opcode 0x66 0x0f 0x38 0x0c - invalid (vex only). */
375/* Opcode 0x0f 0x38 0x0d - invalid. */
376/* Opcode 0x66 0x0f 0x38 0x0d - invalid (vex only). */
377/* Opcode 0x0f 0x38 0x0e - invalid. */
378/* Opcode 0x66 0x0f 0x38 0x0e - invalid (vex only). */
379/* Opcode 0x0f 0x38 0x0f - invalid. */
380/* Opcode 0x66 0x0f 0x38 0x0f - invalid (vex only). */
381
382
383/* Opcode 0x0f 0x38 0x10 - invalid */
384/** Opcode 0x66 0x0f 0x38 0x10 (legacy only). */
385FNIEMOP_STUB(iemOp_pblendvb_Vdq_Wdq);
386/* Opcode 0x0f 0x38 0x11 - invalid */
387/* Opcode 0x66 0x0f 0x38 0x11 - invalid */
388/* Opcode 0x0f 0x38 0x12 - invalid */
389/* Opcode 0x66 0x0f 0x38 0x12 - invalid */
390/* Opcode 0x0f 0x38 0x13 - invalid */
391/* Opcode 0x66 0x0f 0x38 0x13 - invalid (vex only). */
392/* Opcode 0x0f 0x38 0x14 - invalid */
393/** Opcode 0x66 0x0f 0x38 0x14 (legacy only). */
394FNIEMOP_STUB(iemOp_blendvps_Vdq_Wdq);
395/* Opcode 0x0f 0x38 0x15 - invalid */
396/** Opcode 0x66 0x0f 0x38 0x15 (legacy only). */
397FNIEMOP_STUB(iemOp_blendvpd_Vdq_Wdq);
398/* Opcode 0x0f 0x38 0x16 - invalid */
399/* Opcode 0x66 0x0f 0x38 0x16 - invalid (vex only). */
400/* Opcode 0x0f 0x38 0x17 - invalid */
401
402
403/** Opcode 0x66 0x0f 0x38 0x17. */
404FNIEMOP_DEF(iemOp_ptest_Vx_Wx)
405{
406 IEMOP_MNEMONIC2(RM, PTEST, ptest, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
408 if (IEM_IS_MODRM_REG_MODE(bRm))
409 {
410 /*
411 * Register, register.
412 */
413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
414 IEM_MC_BEGIN(3, 0);
415 IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
416 IEM_MC_ARG(PCRTUINT128U, puSrc2, 1);
417 IEM_MC_ARG(uint32_t *, pEFlags, 2);
418 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
419 IEM_MC_PREPARE_SSE_USAGE();
420 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
421 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
422 IEM_MC_REF_EFLAGS(pEFlags);
423 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
424 IEM_MC_ADVANCE_RIP();
425 IEM_MC_END();
426 }
427 else
428 {
429 /*
430 * Register, memory.
431 */
432 IEM_MC_BEGIN(3, 2);
433 IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
434 IEM_MC_LOCAL(RTUINT128U, uSrc2);
435 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1);
436 IEM_MC_ARG(uint32_t *, pEFlags, 2);
437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
438
439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
441 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
442 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
443
444 IEM_MC_PREPARE_SSE_USAGE();
445 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
446 IEM_MC_REF_EFLAGS(pEFlags);
447 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
448
449 IEM_MC_ADVANCE_RIP();
450 IEM_MC_END();
451 }
452 return VINF_SUCCESS;
453}
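/*
 * PTEST only updates EFLAGS: ZF is set when (*puSrc2 & *puSrc1) is all zero,
 * CF when (*puSrc2 & ~*puSrc1) is all zero, and the remaining arithmetic
 * flags are cleared.  iemAImpl_ptest_u128 performs that computation on the
 * referenced EFLAGS.
 */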
454
455
456/* Opcode 0x0f 0x38 0x18 - invalid */
457/* Opcode 0x66 0x0f 0x38 0x18 - invalid (vex only). */
458/* Opcode 0x0f 0x38 0x19 - invalid */
459/* Opcode 0x66 0x0f 0x38 0x19 - invalid (vex only). */
460/* Opcode 0x0f 0x38 0x1a - invalid */
461/* Opcode 0x66 0x0f 0x38 0x1a - invalid (vex only). */
462/* Opcode 0x0f 0x38 0x1b - invalid */
463/* Opcode 0x66 0x0f 0x38 0x1b - invalid */
464
465
466/** Opcode 0x0f 0x38 0x1c. */
467FNIEMOP_DEF(iemOp_pabsb_Pq_Qq)
468{
469 IEMOP_MNEMONIC2(RM, PABSB, pabsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
470 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
471 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsb_u64, &iemAImpl_pabsb_u64_fallback),
472 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
473}
474
475
476/** Opcode 0x66 0x0f 0x38 0x1c. */
477FNIEMOP_DEF(iemOp_pabsb_Vx_Wx)
478{
479 IEMOP_MNEMONIC2(RM, PABSB, pabsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
480 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
481 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsb_u128, iemAImpl_pabsb_u128_fallback));
482
483}
484
485
486/** Opcode 0x0f 0x38 0x1d. */
487FNIEMOP_DEF(iemOp_pabsw_Pq_Qq)
488{
489 IEMOP_MNEMONIC2(RM, PABSW, pabsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
490 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
491 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsw_u64, &iemAImpl_pabsw_u64_fallback),
492 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
493}
494
495
496/** Opcode 0x66 0x0f 0x38 0x1d. */
497FNIEMOP_DEF(iemOp_pabsw_Vx_Wx)
498{
499 IEMOP_MNEMONIC2(RM, PABSW, pabsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
500 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
501 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsw_u128, iemAImpl_pabsw_u128_fallback));
502
503}
504
505
506/** Opcode 0x0f 0x38 0x1e. */
507FNIEMOP_DEF(iemOp_pabsd_Pq_Qq)
508{
509 IEMOP_MNEMONIC2(RM, PABSD, pabsd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
510 return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
511 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsd_u64, &iemAImpl_pabsd_u64_fallback),
512 IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
513}
514
515
516/** Opcode 0x66 0x0f 0x38 0x1e. */
517FNIEMOP_DEF(iemOp_pabsd_Vx_Wx)
518{
519 IEMOP_MNEMONIC2(RM, PABSD, pabsd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
520 return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
521 IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsd_u128, iemAImpl_pabsd_u128_fallback));
522
523}
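/*
 * pabs{b,w,d} simply stores the absolute value of each signed source element
 * into the destination (the most negative value wraps, e.g. PABSB turns -128
 * into 0x80).  As above, the actual work happens in the iemAImpl_pabs*
 * workers.
 */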
524
525
526/* Opcode 0x0f 0x38 0x1f - invalid */
527/* Opcode 0x66 0x0f 0x38 0x1f - invalid */
528
529
530/** Opcode 0x66 0x0f 0x38 0x20. */
531FNIEMOP_STUB(iemOp_pmovsxbw_Vx_UxMq);
532/** Opcode 0x66 0x0f 0x38 0x21. */
533FNIEMOP_STUB(iemOp_pmovsxbd_Vx_UxMd);
534/** Opcode 0x66 0x0f 0x38 0x22. */
535FNIEMOP_STUB(iemOp_pmovsxbq_Vx_UxMw);
536/** Opcode 0x66 0x0f 0x38 0x23. */
537FNIEMOP_STUB(iemOp_pmovsxwd_Vx_UxMq);
538/** Opcode 0x66 0x0f 0x38 0x24. */
539FNIEMOP_STUB(iemOp_pmovsxwq_Vx_UxMd);
540/** Opcode 0x66 0x0f 0x38 0x25. */
541FNIEMOP_STUB(iemOp_pmovsxdq_Vx_UxMq);
542/* Opcode 0x66 0x0f 0x38 0x26 - invalid */
543/* Opcode 0x66 0x0f 0x38 0x27 - invalid */
544/** Opcode 0x66 0x0f 0x38 0x28. */
545FNIEMOP_STUB(iemOp_pmuldq_Vx_Wx);
546
547
548/** Opcode 0x66 0x0f 0x38 0x29. */
549FNIEMOP_DEF(iemOp_pcmpeqq_Vx_Wx)
550{
551 IEMOP_MNEMONIC2(RM, PCMPEQQ, pcmpeqq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
552 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
553 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pcmpeqq_u128, iemAImpl_pcmpeqq_u128_fallback));
554}
555
556
557/**
558 * @opcode 0x2a
559 * @opcodesub !11 mr/reg
560 * @oppfx 0x66
561 * @opcpuid sse4.1
562 * @opgroup og_sse41_cachect
563 * @opxcpttype 1
564 * @optest op1=-1 op2=2 -> op1=2
565 * @optest op1=0 op2=-42 -> op1=-42
566 */
567FNIEMOP_DEF(iemOp_movntdqa_Vdq_Mdq)
568{
569 IEMOP_MNEMONIC2(RM_MEM, MOVNTDQA, movntdqa, Vdq_WO, Mdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
570 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
571 if (IEM_IS_MODRM_MEM_MODE(bRm))
572 {
573 /* Register, memory. */
574 IEM_MC_BEGIN(0, 2);
575 IEM_MC_LOCAL(RTUINT128U, uSrc);
576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
577
578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
580 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
581 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
582
583 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
584 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
585
586 IEM_MC_ADVANCE_RIP();
587 IEM_MC_END();
588 return VINF_SUCCESS;
589 }
590
591 /**
592 * @opdone
593 * @opmnemonic ud660f382areg
594 * @opcode 0x2a
595 * @opcodesub 11 mr/reg
596 * @oppfx 0x66
597 * @opunused immediate
598 * @opcpuid sse
599 * @optest ->
600 */
601 return IEMOP_RAISE_INVALID_OPCODE();
602}
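/*
 * MOVNTDQA is a non-temporal (streaming) load; the cache hint has no guest
 * visible effect to emulate here, so it is handled as a plain 16-byte aligned
 * load into the destination XMM register.
 */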
603
604
605/** Opcode 0x66 0x0f 0x38 0x2b. */
606FNIEMOP_DEF(iemOp_packusdw_Vx_Wx)
607{
608 IEMOP_MNEMONIC2(RM, PACKUSDW, packusdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
609 return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full, iemAImpl_packusdw_u128);
610}
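/*
 * PACKUSDW converts the four signed dwords of the destination and the four of
 * the source into eight words with unsigned saturation (values clamped to the
 * 0..0xffff range); iemAImpl_packusdw_u128 does the packing.
 */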
611
612
613/* Opcode 0x66 0x0f 0x38 0x2c - invalid (vex only). */
614/* Opcode 0x66 0x0f 0x38 0x2d - invalid (vex only). */
615/* Opcode 0x66 0x0f 0x38 0x2e - invalid (vex only). */
616/* Opcode 0x66 0x0f 0x38 0x2f - invalid (vex only). */
617
618/** Opcode 0x66 0x0f 0x38 0x30. */
619FNIEMOP_STUB(iemOp_pmovzxbw_Vx_UxMq);
620/** Opcode 0x66 0x0f 0x38 0x31. */
621FNIEMOP_STUB(iemOp_pmovzxbd_Vx_UxMd);
622/** Opcode 0x66 0x0f 0x38 0x32. */
623FNIEMOP_STUB(iemOp_pmovzxbq_Vx_UxMw);
624/** Opcode 0x66 0x0f 0x38 0x33. */
625FNIEMOP_STUB(iemOp_pmovzxwd_Vx_UxMq);
626/** Opcode 0x66 0x0f 0x38 0x34. */
627FNIEMOP_STUB(iemOp_pmovzxwq_Vx_UxMd);
628/** Opcode 0x66 0x0f 0x38 0x35. */
629FNIEMOP_STUB(iemOp_pmovzxdq_Vx_UxMq);
630/* Opcode 0x66 0x0f 0x38 0x36 - invalid (vex only). */
631
632
633/** Opcode 0x66 0x0f 0x38 0x37. */
634FNIEMOP_DEF(iemOp_pcmpgtq_Vx_Wx)
635{
636 IEMOP_MNEMONIC2(RM, PCMPGTQ, pcmpgtq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
637 return FNIEMOP_CALL_1(iemOpCommonSse42_FullFull_To_Full,
638 IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_pcmpgtq_u128, iemAImpl_pcmpgtq_u128_fallback));
639}
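/*
 * Note: PCMPGTQ (signed 64-bit greater-than, SSE4.2) and PCMPEQQ above
 * (64-bit equality, SSE4.1) both produce an all-ones/all-zeroes mask per
 * qword lane, which is why they reuse the generic full/full workers.
 */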
640
641
642/** Opcode 0x66 0x0f 0x38 0x38. */
643FNIEMOP_DEF(iemOp_pminsb_Vx_Wx)
644{
645 IEMOP_MNEMONIC2(RM, PMINSB, pminsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
646 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
647 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminsb_u128, iemAImpl_pminsb_u128_fallback));
648}
649
650
651/** Opcode 0x66 0x0f 0x38 0x39. */
652FNIEMOP_DEF(iemOp_pminsd_Vx_Wx)
653{
654 IEMOP_MNEMONIC2(RM, PMINSD, pminsd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
655 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
656 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminsd_u128, iemAImpl_pminsd_u128_fallback));
657}
658
659
660/** Opcode 0x66 0x0f 0x38 0x3a. */
661FNIEMOP_DEF(iemOp_pminuw_Vx_Wx)
662{
663 IEMOP_MNEMONIC2(RM, PMINUW, pminuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
664 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
665 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminuw_u128, iemAImpl_pminuw_u128_fallback));
666}
667
668
669/** Opcode 0x66 0x0f 0x38 0x3b. */
670FNIEMOP_DEF(iemOp_pminud_Vx_Wx)
671{
672 IEMOP_MNEMONIC2(RM, PMINUD, pminud, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
673 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
674 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminud_u128, iemAImpl_pminud_u128_fallback));
675}
676
677
678/** Opcode 0x66 0x0f 0x38 0x3c. */
679FNIEMOP_DEF(iemOp_pmaxsb_Vx_Wx)
680{
681 IEMOP_MNEMONIC2(RM, PMAXSB, pmaxsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
682 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
683 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxsb_u128, iemAImpl_pmaxsb_u128_fallback));
684}
685
686
687/** Opcode 0x66 0x0f 0x38 0x3d. */
688FNIEMOP_DEF(iemOp_pmaxsd_Vx_Wx)
689{
690 IEMOP_MNEMONIC2(RM, PMAXSD, pmaxsd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
691 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
692 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxsd_u128, iemAImpl_pmaxsd_u128_fallback));
693}
694
695
696/** Opcode 0x66 0x0f 0x38 0x3e. */
697FNIEMOP_DEF(iemOp_pmaxuw_Vx_Wx)
698{
699 IEMOP_MNEMONIC2(RM, PMAXUW, pmaxuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
700 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
701 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxuw_u128, iemAImpl_pmaxuw_u128_fallback));
702}
703
704
705/** Opcode 0x66 0x0f 0x38 0x3f. */
706FNIEMOP_DEF(iemOp_pmaxud_Vx_Wx)
707{
708 IEMOP_MNEMONIC2(RM, PMAXUD, pmaxud, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
709 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
710 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxud_u128, iemAImpl_pmaxud_u128_fallback));
711}
712
713
714/** Opcode 0x66 0x0f 0x38 0x40. */
715FNIEMOP_DEF(iemOp_pmulld_Vx_Wx)
716{
717 IEMOP_MNEMONIC2(RM, PMULLD, pmulld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
718 return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
719 IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmulld_u128, iemAImpl_pmulld_u128_fallback));
720}
721
722
723/** Opcode 0x66 0x0f 0x38 0x41. */
724FNIEMOP_STUB(iemOp_phminposuw_Vdq_Wdq);
725/* Opcode 0x66 0x0f 0x38 0x42 - invalid. */
726/* Opcode 0x66 0x0f 0x38 0x43 - invalid. */
727/* Opcode 0x66 0x0f 0x38 0x44 - invalid. */
728/* Opcode 0x66 0x0f 0x38 0x45 - invalid (vex only). */
729/* Opcode 0x66 0x0f 0x38 0x46 - invalid (vex only). */
730/* Opcode 0x66 0x0f 0x38 0x47 - invalid (vex only). */
731/* Opcode 0x66 0x0f 0x38 0x48 - invalid. */
732/* Opcode 0x66 0x0f 0x38 0x49 - invalid. */
733/* Opcode 0x66 0x0f 0x38 0x4a - invalid. */
734/* Opcode 0x66 0x0f 0x38 0x4b - invalid. */
735/* Opcode 0x66 0x0f 0x38 0x4c - invalid. */
736/* Opcode 0x66 0x0f 0x38 0x4d - invalid. */
737/* Opcode 0x66 0x0f 0x38 0x4e - invalid. */
738/* Opcode 0x66 0x0f 0x38 0x4f - invalid. */
739
740/* Opcode 0x66 0x0f 0x38 0x50 - invalid. */
741/* Opcode 0x66 0x0f 0x38 0x51 - invalid. */
742/* Opcode 0x66 0x0f 0x38 0x52 - invalid. */
743/* Opcode 0x66 0x0f 0x38 0x53 - invalid. */
744/* Opcode 0x66 0x0f 0x38 0x54 - invalid. */
745/* Opcode 0x66 0x0f 0x38 0x55 - invalid. */
746/* Opcode 0x66 0x0f 0x38 0x56 - invalid. */
747/* Opcode 0x66 0x0f 0x38 0x57 - invalid. */
748/* Opcode 0x66 0x0f 0x38 0x58 - invalid (vex only). */
749/* Opcode 0x66 0x0f 0x38 0x59 - invalid (vex only). */
750/* Opcode 0x66 0x0f 0x38 0x5a - invalid (vex only). */
751/* Opcode 0x66 0x0f 0x38 0x5b - invalid. */
752/* Opcode 0x66 0x0f 0x38 0x5c - invalid. */
753/* Opcode 0x66 0x0f 0x38 0x5d - invalid. */
754/* Opcode 0x66 0x0f 0x38 0x5e - invalid. */
755/* Opcode 0x66 0x0f 0x38 0x5f - invalid. */
756
757/* Opcode 0x66 0x0f 0x38 0x60 - invalid. */
758/* Opcode 0x66 0x0f 0x38 0x61 - invalid. */
759/* Opcode 0x66 0x0f 0x38 0x62 - invalid. */
760/* Opcode 0x66 0x0f 0x38 0x63 - invalid. */
761/* Opcode 0x66 0x0f 0x38 0x64 - invalid. */
762/* Opcode 0x66 0x0f 0x38 0x65 - invalid. */
763/* Opcode 0x66 0x0f 0x38 0x66 - invalid. */
764/* Opcode 0x66 0x0f 0x38 0x67 - invalid. */
765/* Opcode 0x66 0x0f 0x38 0x68 - invalid. */
766/* Opcode 0x66 0x0f 0x38 0x69 - invalid. */
767/* Opcode 0x66 0x0f 0x38 0x6a - invalid. */
768/* Opcode 0x66 0x0f 0x38 0x6b - invalid. */
769/* Opcode 0x66 0x0f 0x38 0x6c - invalid. */
770/* Opcode 0x66 0x0f 0x38 0x6d - invalid. */
771/* Opcode 0x66 0x0f 0x38 0x6e - invalid. */
772/* Opcode 0x66 0x0f 0x38 0x6f - invalid. */
773
774/* Opcode 0x66 0x0f 0x38 0x70 - invalid. */
775/* Opcode 0x66 0x0f 0x38 0x71 - invalid. */
776/* Opcode 0x66 0x0f 0x38 0x72 - invalid. */
777/* Opcode 0x66 0x0f 0x38 0x73 - invalid. */
778/* Opcode 0x66 0x0f 0x38 0x74 - invalid. */
779/* Opcode 0x66 0x0f 0x38 0x75 - invalid. */
780/* Opcode 0x66 0x0f 0x38 0x76 - invalid. */
781/* Opcode 0x66 0x0f 0x38 0x77 - invalid. */
782/* Opcode 0x66 0x0f 0x38 0x78 - invalid (vex only). */
783/* Opcode 0x66 0x0f 0x38 0x79 - invalid (vex only). */
784/* Opcode 0x66 0x0f 0x38 0x7a - invalid. */
785/* Opcode 0x66 0x0f 0x38 0x7b - invalid. */
786/* Opcode 0x66 0x0f 0x38 0x7c - invalid. */
787/* Opcode 0x66 0x0f 0x38 0x7d - invalid. */
788/* Opcode 0x66 0x0f 0x38 0x7e - invalid. */
789/* Opcode 0x66 0x0f 0x38 0x7f - invalid. */
790
791/** Opcode 0x66 0x0f 0x38 0x80. */
792#ifdef VBOX_WITH_NESTED_HWVIRT_VMX_EPT
793FNIEMOP_DEF(iemOp_invept_Gy_Mdq)
794{
795 IEMOP_MNEMONIC(invept, "invept Gy,Mdq");
796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
797 IEMOP_HLP_IN_VMX_OPERATION("invept", kVmxVDiag_Invept);
798 IEMOP_HLP_VMX_INSTR("invept", kVmxVDiag_Invept);
799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
800 if (IEM_IS_MODRM_MEM_MODE(bRm))
801 {
802 /* Register, memory. */
803 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
804 {
805 IEM_MC_BEGIN(3, 0);
806 IEM_MC_ARG(uint8_t, iEffSeg, 0);
807 IEM_MC_ARG(RTGCPTR, GCPtrInveptDesc, 1);
808 IEM_MC_ARG(uint64_t, uInveptType, 2);
809 IEM_MC_FETCH_GREG_U64(uInveptType, IEM_GET_MODRM_REG(pVCpu, bRm));
810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInveptDesc, bRm, 0);
811 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
812 IEM_MC_CALL_CIMPL_3(iemCImpl_invept, iEffSeg, GCPtrInveptDesc, uInveptType);
813 IEM_MC_END();
814 }
815 else
816 {
817 IEM_MC_BEGIN(3, 0);
818 IEM_MC_ARG(uint8_t, iEffSeg, 0);
819 IEM_MC_ARG(RTGCPTR, GCPtrInveptDesc, 1);
820 IEM_MC_ARG(uint32_t, uInveptType, 2);
821 IEM_MC_FETCH_GREG_U32(uInveptType, IEM_GET_MODRM_REG(pVCpu, bRm));
822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInveptDesc, bRm, 0);
823 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
824 IEM_MC_CALL_CIMPL_3(iemCImpl_invept, iEffSeg, GCPtrInveptDesc, uInveptType);
825 IEM_MC_END();
826 }
827 }
828 Log(("iemOp_invept_Gy_Mdq: invalid encoding -> #UD\n"));
829 return IEMOP_RAISE_INVALID_OPCODE();
830}
831#else
832FNIEMOP_STUB(iemOp_invept_Gy_Mdq);
833#endif
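/*
 * The three invalidation instructions here (invept above, invvpid and invpcid
 * below) share the same decoding pattern: only the memory form is valid (the
 * register form falls through to #UD), the invalidation type is read from the
 * ModR/M reg field as a 32- or 64-bit GPR depending on the effective operand
 * size, and the heavy lifting is done by the corresponding iemCImpl_* worker.
 * invept and invvpid additionally require the guest to be in VMX operation.
 */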
834
835/** Opcode 0x66 0x0f 0x38 0x81. */
836#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
837FNIEMOP_DEF(iemOp_invvpid_Gy_Mdq)
838{
839 IEMOP_MNEMONIC(invvpid, "invvpid Gy,Mdq");
840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
841 IEMOP_HLP_IN_VMX_OPERATION("invvpid", kVmxVDiag_Invvpid);
842 IEMOP_HLP_VMX_INSTR("invvpid", kVmxVDiag_Invvpid);
843 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
844 if (IEM_IS_MODRM_MEM_MODE(bRm))
845 {
846 /* Register, memory. */
847 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
848 {
849 IEM_MC_BEGIN(3, 0);
850 IEM_MC_ARG(uint8_t, iEffSeg, 0);
851 IEM_MC_ARG(RTGCPTR, GCPtrInvvpidDesc, 1);
852 IEM_MC_ARG(uint64_t, uInvvpidType, 2);
853 IEM_MC_FETCH_GREG_U64(uInvvpidType, IEM_GET_MODRM_REG(pVCpu, bRm));
854 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvvpidDesc, bRm, 0);
855 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
856 IEM_MC_CALL_CIMPL_3(iemCImpl_invvpid, iEffSeg, GCPtrInvvpidDesc, uInvvpidType);
857 IEM_MC_END();
858 }
859 else
860 {
861 IEM_MC_BEGIN(3, 0);
862 IEM_MC_ARG(uint8_t, iEffSeg, 0);
863 IEM_MC_ARG(RTGCPTR, GCPtrInvvpidDesc, 1);
864 IEM_MC_ARG(uint32_t, uInvvpidType, 2);
865 IEM_MC_FETCH_GREG_U32(uInvvpidType, IEM_GET_MODRM_REG(pVCpu, bRm));
866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvvpidDesc, bRm, 0);
867 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
868 IEM_MC_CALL_CIMPL_3(iemCImpl_invvpid, iEffSeg, GCPtrInvvpidDesc, uInvvpidType);
869 IEM_MC_END();
870 }
871 }
872 Log(("iemOp_invvpid_Gy_Mdq: invalid encoding -> #UD\n"));
873 return IEMOP_RAISE_INVALID_OPCODE();
874}
875#else
876FNIEMOP_STUB(iemOp_invvpid_Gy_Mdq);
877#endif
878
879/** Opcode 0x66 0x0f 0x38 0x82. */
880FNIEMOP_DEF(iemOp_invpcid_Gy_Mdq)
881{
882 IEMOP_MNEMONIC(invpcid, "invpcid Gy,Mdq");
883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
885 if (IEM_IS_MODRM_MEM_MODE(bRm))
886 {
887 /* Register, memory. */
888 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
889 {
890 IEM_MC_BEGIN(3, 0);
891 IEM_MC_ARG(uint8_t, iEffSeg, 0);
892 IEM_MC_ARG(RTGCPTR, GCPtrInvpcidDesc, 1);
893 IEM_MC_ARG(uint64_t, uInvpcidType, 2);
894 IEM_MC_FETCH_GREG_U64(uInvpcidType, IEM_GET_MODRM_REG(pVCpu, bRm));
895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvpcidDesc, bRm, 0);
896 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
897 IEM_MC_CALL_CIMPL_3(iemCImpl_invpcid, iEffSeg, GCPtrInvpcidDesc, uInvpcidType);
898 IEM_MC_END();
899 }
900 else
901 {
902 IEM_MC_BEGIN(3, 0);
903 IEM_MC_ARG(uint8_t, iEffSeg, 0);
904 IEM_MC_ARG(RTGCPTR, GCPtrInvpcidDesc, 1);
905 IEM_MC_ARG(uint32_t, uInvpcidType, 2);
906 IEM_MC_FETCH_GREG_U32(uInvpcidType, IEM_GET_MODRM_REG(pVCpu, bRm));
907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvpcidDesc, bRm, 0);
908 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
909 IEM_MC_CALL_CIMPL_3(iemCImpl_invpcid, iEffSeg, GCPtrInvpcidDesc, uInvpcidType);
910 IEM_MC_END();
911 }
912 }
913 Log(("iemOp_invpcid_Gy_Mdq: invalid encoding -> #UD\n"));
914 return IEMOP_RAISE_INVALID_OPCODE();
915}
916
917
918/* Opcode 0x66 0x0f 0x38 0x83 - invalid. */
919/* Opcode 0x66 0x0f 0x38 0x84 - invalid. */
920/* Opcode 0x66 0x0f 0x38 0x85 - invalid. */
921/* Opcode 0x66 0x0f 0x38 0x86 - invalid. */
922/* Opcode 0x66 0x0f 0x38 0x87 - invalid. */
923/* Opcode 0x66 0x0f 0x38 0x88 - invalid. */
924/* Opcode 0x66 0x0f 0x38 0x89 - invalid. */
925/* Opcode 0x66 0x0f 0x38 0x8a - invalid. */
926/* Opcode 0x66 0x0f 0x38 0x8b - invalid. */
927/* Opcode 0x66 0x0f 0x38 0x8c - invalid (vex only). */
928/* Opcode 0x66 0x0f 0x38 0x8d - invalid. */
929/* Opcode 0x66 0x0f 0x38 0x8e - invalid (vex only). */
930/* Opcode 0x66 0x0f 0x38 0x8f - invalid. */
931
932/* Opcode 0x66 0x0f 0x38 0x90 - invalid (vex only). */
933/* Opcode 0x66 0x0f 0x38 0x91 - invalid (vex only). */
934/* Opcode 0x66 0x0f 0x38 0x92 - invalid (vex only). */
935/* Opcode 0x66 0x0f 0x38 0x93 - invalid (vex only). */
936/* Opcode 0x66 0x0f 0x38 0x94 - invalid. */
937/* Opcode 0x66 0x0f 0x38 0x95 - invalid. */
938/* Opcode 0x66 0x0f 0x38 0x96 - invalid (vex only). */
939/* Opcode 0x66 0x0f 0x38 0x97 - invalid (vex only). */
940/* Opcode 0x66 0x0f 0x38 0x98 - invalid (vex only). */
941/* Opcode 0x66 0x0f 0x38 0x99 - invalid (vex only). */
942/* Opcode 0x66 0x0f 0x38 0x9a - invalid (vex only). */
943/* Opcode 0x66 0x0f 0x38 0x9b - invalid (vex only). */
944/* Opcode 0x66 0x0f 0x38 0x9c - invalid (vex only). */
945/* Opcode 0x66 0x0f 0x38 0x9d - invalid (vex only). */
946/* Opcode 0x66 0x0f 0x38 0x9e - invalid (vex only). */
947/* Opcode 0x66 0x0f 0x38 0x9f - invalid (vex only). */
948
949/* Opcode 0x66 0x0f 0x38 0xa0 - invalid. */
950/* Opcode 0x66 0x0f 0x38 0xa1 - invalid. */
951/* Opcode 0x66 0x0f 0x38 0xa2 - invalid. */
952/* Opcode 0x66 0x0f 0x38 0xa3 - invalid. */
953/* Opcode 0x66 0x0f 0x38 0xa4 - invalid. */
954/* Opcode 0x66 0x0f 0x38 0xa5 - invalid. */
955/* Opcode 0x66 0x0f 0x38 0xa6 - invalid (vex only). */
956/* Opcode 0x66 0x0f 0x38 0xa7 - invalid (vex only). */
957/* Opcode 0x66 0x0f 0x38 0xa8 - invalid (vex only). */
958/* Opcode 0x66 0x0f 0x38 0xa9 - invalid (vex only). */
959/* Opcode 0x66 0x0f 0x38 0xaa - invalid (vex only). */
960/* Opcode 0x66 0x0f 0x38 0xab - invalid (vex only). */
961/* Opcode 0x66 0x0f 0x38 0xac - invalid (vex only). */
962/* Opcode 0x66 0x0f 0x38 0xad - invalid (vex only). */
963/* Opcode 0x66 0x0f 0x38 0xae - invalid (vex only). */
964/* Opcode 0x66 0x0f 0x38 0xaf - invalid (vex only). */
965
966/* Opcode 0x66 0x0f 0x38 0xb0 - invalid. */
967/* Opcode 0x66 0x0f 0x38 0xb1 - invalid. */
968/* Opcode 0x66 0x0f 0x38 0xb2 - invalid. */
969/* Opcode 0x66 0x0f 0x38 0xb3 - invalid. */
970/* Opcode 0x66 0x0f 0x38 0xb4 - invalid. */
971/* Opcode 0x66 0x0f 0x38 0xb5 - invalid. */
972/* Opcode 0x66 0x0f 0x38 0xb6 - invalid (vex only). */
973/* Opcode 0x66 0x0f 0x38 0xb7 - invalid (vex only). */
974/* Opcode 0x66 0x0f 0x38 0xb8 - invalid (vex only). */
975/* Opcode 0x66 0x0f 0x38 0xb9 - invalid (vex only). */
976/* Opcode 0x66 0x0f 0x38 0xba - invalid (vex only). */
977/* Opcode 0x66 0x0f 0x38 0xbb - invalid (vex only). */
978/* Opcode 0x66 0x0f 0x38 0xbc - invalid (vex only). */
979/* Opcode 0x66 0x0f 0x38 0xbd - invalid (vex only). */
980/* Opcode 0x66 0x0f 0x38 0xbe - invalid (vex only). */
981/* Opcode 0x66 0x0f 0x38 0xbf - invalid (vex only). */
982
983/* Opcode 0x0f 0x38 0xc0 - invalid. */
984/* Opcode 0x66 0x0f 0x38 0xc0 - invalid. */
985/* Opcode 0x0f 0x38 0xc1 - invalid. */
986/* Opcode 0x66 0x0f 0x38 0xc1 - invalid. */
987/* Opcode 0x0f 0x38 0xc2 - invalid. */
988/* Opcode 0x66 0x0f 0x38 0xc2 - invalid. */
989/* Opcode 0x0f 0x38 0xc3 - invalid. */
990/* Opcode 0x66 0x0f 0x38 0xc3 - invalid. */
991/* Opcode 0x0f 0x38 0xc4 - invalid. */
992/* Opcode 0x66 0x0f 0x38 0xc4 - invalid. */
993/* Opcode 0x0f 0x38 0xc5 - invalid. */
994/* Opcode 0x66 0x0f 0x38 0xc5 - invalid. */
995/* Opcode 0x0f 0x38 0xc6 - invalid. */
996/* Opcode 0x66 0x0f 0x38 0xc6 - invalid. */
997/* Opcode 0x0f 0x38 0xc7 - invalid. */
998/* Opcode 0x66 0x0f 0x38 0xc7 - invalid. */
999/** Opcode 0x0f 0x38 0xc8. */
1000FNIEMOP_STUB(iemOp_sha1nexte_Vdq_Wdq);
1001/* Opcode 0x66 0x0f 0x38 0xc8 - invalid. */
1002/** Opcode 0x0f 0x38 0xc9. */
1003FNIEMOP_STUB(iemOp_sha1msg1_Vdq_Wdq);
1004/* Opcode 0x66 0x0f 0x38 0xc9 - invalid. */
1005/** Opcode 0x0f 0x38 0xca. */
1006FNIEMOP_STUB(iemOp_sha1msg2_Vdq_Wdq);
1007/* Opcode 0x66 0x0f 0x38 0xca - invalid. */
1008/** Opcode 0x0f 0x38 0xcb. */
1009FNIEMOP_STUB(iemOp_sha256rnds2_Vdq_Wdq);
1010/* Opcode 0x66 0x0f 0x38 0xcb - invalid. */
1011/** Opcode 0x0f 0x38 0xcc. */
1012FNIEMOP_STUB(iemOp_sha256msg1_Vdq_Wdq);
1013/* Opcode 0x66 0x0f 0x38 0xcc - invalid. */
1014/** Opcode 0x0f 0x38 0xcd. */
1015FNIEMOP_STUB(iemOp_sha256msg2_Vdq_Wdq);
1016/* Opcode 0x66 0x0f 0x38 0xcd - invalid. */
1017/* Opcode 0x0f 0x38 0xce - invalid. */
1018/* Opcode 0x66 0x0f 0x38 0xce - invalid. */
1019/* Opcode 0x0f 0x38 0xcf - invalid. */
1020/* Opcode 0x66 0x0f 0x38 0xcf - invalid. */
1021
1022/* Opcode 0x66 0x0f 0x38 0xd0 - invalid. */
1023/* Opcode 0x66 0x0f 0x38 0xd1 - invalid. */
1024/* Opcode 0x66 0x0f 0x38 0xd2 - invalid. */
1025/* Opcode 0x66 0x0f 0x38 0xd3 - invalid. */
1026/* Opcode 0x66 0x0f 0x38 0xd4 - invalid. */
1027/* Opcode 0x66 0x0f 0x38 0xd5 - invalid. */
1028/* Opcode 0x66 0x0f 0x38 0xd6 - invalid. */
1029/* Opcode 0x66 0x0f 0x38 0xd7 - invalid. */
1030/* Opcode 0x66 0x0f 0x38 0xd8 - invalid. */
1031/* Opcode 0x66 0x0f 0x38 0xd9 - invalid. */
1032/* Opcode 0x66 0x0f 0x38 0xda - invalid. */
1033/** Opcode 0x66 0x0f 0x38 0xdb. */
1034FNIEMOP_STUB(iemOp_aesimc_Vdq_Wdq);
1035/** Opcode 0x66 0x0f 0x38 0xdc. */
1036FNIEMOP_STUB(iemOp_aesenc_Vdq_Wdq);
1037/** Opcode 0x66 0x0f 0x38 0xdd. */
1038FNIEMOP_STUB(iemOp_aesenclast_Vdq_Wdq);
1039/** Opcode 0x66 0x0f 0x38 0xde. */
1040FNIEMOP_STUB(iemOp_aesdec_Vdq_Wdq);
1041/** Opcode 0x66 0x0f 0x38 0xdf. */
1042FNIEMOP_STUB(iemOp_aesdeclast_Vdq_Wdq);
1043
1044/* Opcode 0x66 0x0f 0x38 0xe0 - invalid. */
1045/* Opcode 0x66 0x0f 0x38 0xe1 - invalid. */
1046/* Opcode 0x66 0x0f 0x38 0xe2 - invalid. */
1047/* Opcode 0x66 0x0f 0x38 0xe3 - invalid. */
1048/* Opcode 0x66 0x0f 0x38 0xe4 - invalid. */
1049/* Opcode 0x66 0x0f 0x38 0xe5 - invalid. */
1050/* Opcode 0x66 0x0f 0x38 0xe6 - invalid. */
1051/* Opcode 0x66 0x0f 0x38 0xe7 - invalid. */
1052/* Opcode 0x66 0x0f 0x38 0xe8 - invalid. */
1053/* Opcode 0x66 0x0f 0x38 0xe9 - invalid. */
1054/* Opcode 0x66 0x0f 0x38 0xea - invalid. */
1055/* Opcode 0x66 0x0f 0x38 0xeb - invalid. */
1056/* Opcode 0x66 0x0f 0x38 0xec - invalid. */
1057/* Opcode 0x66 0x0f 0x38 0xed - invalid. */
1058/* Opcode 0x66 0x0f 0x38 0xee - invalid. */
1059/* Opcode 0x66 0x0f 0x38 0xef - invalid. */
1060
1061
1062/** Opcode 0x0f 0x38 0xf0. */
1063FNIEMOP_STUB(iemOp_movbe_Gy_My);
1064/** Opcode 0x66 0x0f 0x38 0xf0. */
1065FNIEMOP_STUB(iemOp_movbe_Gw_Mw);
1066/* Opcode 0xf3 0x0f 0x38 0xf0 - invalid. */
1067
1068
1069/** Opcode 0xf2 0x0f 0x38 0xf0. */
1070FNIEMOP_DEF(iemOp_crc32_Gd_Eb)
1071{
1072 IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Eb, DISOPTYPE_HARMLESS, 0);
1073 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
1074 return iemOp_InvalidNeedRM(pVCpu);
1075
1076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1077 if (IEM_IS_MODRM_REG_MODE(bRm))
1078 {
1079 /*
1080 * Register, register.
1081 */
1082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1083 IEM_MC_BEGIN(2, 0);
1084 IEM_MC_ARG(uint32_t *, puDst, 0);
1085 IEM_MC_ARG(uint8_t, uSrc, 1);
1086 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1087 IEM_MC_FETCH_GREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1088 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
1089 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1090 IEM_MC_ADVANCE_RIP();
1091 IEM_MC_END();
1092 }
1093 else
1094 {
1095 /*
1096 * Register, memory.
1097 */
1098 IEM_MC_BEGIN(2, 1);
1099 IEM_MC_ARG(uint32_t *, puDst, 0);
1100 IEM_MC_ARG(uint8_t, uSrc, 1);
1101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1102
1103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1105 IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1106
1107 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1108 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
1109 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1110
1111 IEM_MC_ADVANCE_RIP();
1112 IEM_MC_END();
1113 }
1114 return VINF_SUCCESS;
1115}
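/*
 * CRC32 accumulates into a 32-bit GPR using the CRC-32C (Castagnoli)
 * polynomial; a rough per-byte sketch of the reflected update the
 * iemAImpl_crc32_u8 worker has to produce (illustrative only):
 *     uCrc ^= bByte;
 *     for (unsigned i = 0; i < 8; i++)
 *         uCrc = (uCrc >> 1) ^ (0x82f63b78 & (0U - (uCrc & 1)));
 * In 64-bit mode the upper half of the destination is cleared, hence the
 * IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF call.
 */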
1116
1117
1118/** Opcode 0x0f 0x38 0xf1. */
1119FNIEMOP_STUB(iemOp_movbe_My_Gy);
1120/** Opcode 0x66 0x0f 0x38 0xf1. */
1121FNIEMOP_STUB(iemOp_movbe_Mw_Gw);
1122/* Opcode 0xf3 0x0f 0x38 0xf1 - invalid. */
1123
1124
1125/** Opcode 0xf2 0x0f 0x38 0xf1. */
1126FNIEMOP_DEF(iemOp_crc32_Gv_Ev)
1127{
1128 IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Ev, DISOPTYPE_HARMLESS, 0);
1129 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
1130 return iemOp_InvalidNeedRM(pVCpu);
1131
1132 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1133 if (IEM_IS_MODRM_REG_MODE(bRm))
1134 {
1135 /*
1136 * Register, register.
1137 */
1138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1139 switch (pVCpu->iem.s.enmEffOpSize)
1140 {
1141 case IEMMODE_16BIT:
1142 IEM_MC_BEGIN(2, 0);
1143 IEM_MC_ARG(uint32_t *, puDst, 0);
1144 IEM_MC_ARG(uint16_t, uSrc, 1);
1145 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1146 IEM_MC_FETCH_GREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1147 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
1148 puDst, uSrc);
1149 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1150 IEM_MC_ADVANCE_RIP();
1151 IEM_MC_END();
1152 return VINF_SUCCESS;
1153
1154 case IEMMODE_32BIT:
1155 IEM_MC_BEGIN(2, 0);
1156 IEM_MC_ARG(uint32_t *, puDst, 0);
1157 IEM_MC_ARG(uint32_t, uSrc, 1);
1158 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1159 IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1160 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
1161 puDst, uSrc);
1162 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1163 IEM_MC_ADVANCE_RIP();
1164 IEM_MC_END();
1165 return VINF_SUCCESS;
1166
1167 case IEMMODE_64BIT:
1168 IEM_MC_BEGIN(2, 0);
1169 IEM_MC_ARG(uint32_t *, puDst, 0);
1170 IEM_MC_ARG(uint64_t, uSrc, 1);
1171 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1172 IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1173 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
1174 puDst, uSrc);
1175 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1176 IEM_MC_ADVANCE_RIP();
1177 IEM_MC_END();
1178 return VINF_SUCCESS;
1179
1180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1181 }
1182 }
1183 else
1184 {
1185 /*
1186 * Register, memory.
1187 */
1188 switch (pVCpu->iem.s.enmEffOpSize)
1189 {
1190 case IEMMODE_16BIT:
1191 IEM_MC_BEGIN(2, 1);
1192 IEM_MC_ARG(uint32_t *, puDst, 0);
1193 IEM_MC_ARG(uint16_t, uSrc, 1);
1194 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1195
1196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1198 IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1199
1200 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1201 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
1202 puDst, uSrc);
1203 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1204
1205 IEM_MC_ADVANCE_RIP();
1206 IEM_MC_END();
1207 return VINF_SUCCESS;
1208
1209 case IEMMODE_32BIT:
1210 IEM_MC_BEGIN(2, 1);
1211 IEM_MC_ARG(uint32_t *, puDst, 0);
1212 IEM_MC_ARG(uint32_t, uSrc, 1);
1213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1214
1215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1217 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1218
1219 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1220 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
1221 puDst, uSrc);
1222 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1223
1224 IEM_MC_ADVANCE_RIP();
1225 IEM_MC_END();
1226 return VINF_SUCCESS;
1227
1228 case IEMMODE_64BIT:
1229 IEM_MC_BEGIN(2, 1);
1230 IEM_MC_ARG(uint32_t *, puDst, 0);
1231 IEM_MC_ARG(uint64_t, uSrc, 1);
1232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1233
1234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1236 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1237
1238 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1239 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
1240 puDst, uSrc);
1241 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
1242
1243 IEM_MC_ADVANCE_RIP();
1244 IEM_MC_END();
1245 return VINF_SUCCESS;
1246
1247 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1248 }
1249 }
1250}
1251
1252
1253/* Opcode 0x0f 0x38 0xf2 - invalid (vex only). */
1254/* Opcode 0x66 0x0f 0x38 0xf2 - invalid. */
1255/* Opcode 0xf3 0x0f 0x38 0xf2 - invalid. */
1256/* Opcode 0xf2 0x0f 0x38 0xf2 - invalid. */
1257
1258/* Opcode 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1259/* Opcode 0x66 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1260/* Opcode 0xf3 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1261/* Opcode 0xf2 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1262
1263/* Opcode 0x0f 0x38 0xf4 - invalid. */
1264/* Opcode 0x66 0x0f 0x38 0xf4 - invalid. */
1265/* Opcode 0xf3 0x0f 0x38 0xf4 - invalid. */
1266/* Opcode 0xf2 0x0f 0x38 0xf4 - invalid. */
1267
1268/* Opcode 0x0f 0x38 0xf5 - invalid (vex only). */
1269/* Opcode 0x66 0x0f 0x38 0xf5 - invalid. */
1270/* Opcode 0xf3 0x0f 0x38 0xf5 - invalid (vex only). */
1271/* Opcode 0xf2 0x0f 0x38 0xf5 - invalid (vex only). */
1272
1273/* Opcode 0x0f 0x38 0xf6 - invalid. */
1274/** Opcode 0x66 0x0f 0x38 0xf6. */
1275FNIEMOP_STUB(iemOp_adcx_Gy_Ey);
1276/** Opcode 0xf3 0x0f 0x38 0xf6. */
1277FNIEMOP_STUB(iemOp_adox_Gy_Ey);
1278/* Opcode 0xf2 0x0f 0x38 0xf6 - invalid (vex only). */
1279
1280/* Opcode 0x0f 0x38 0xf7 - invalid (vex only). */
1281/* Opcode 0x66 0x0f 0x38 0xf7 - invalid (vex only). */
1282/* Opcode 0xf3 0x0f 0x38 0xf7 - invalid (vex only). */
1283/* Opcode 0xf2 0x0f 0x38 0xf7 - invalid (vex only). */
1284
1285/* Opcode 0x0f 0x38 0xf8 - invalid. */
1286/* Opcode 0x66 0x0f 0x38 0xf8 - invalid. */
1287/* Opcode 0xf3 0x0f 0x38 0xf8 - invalid. */
1288/* Opcode 0xf2 0x0f 0x38 0xf8 - invalid. */
1289
1290/* Opcode 0x0f 0x38 0xf9 - invalid. */
1291/* Opcode 0x66 0x0f 0x38 0xf9 - invalid. */
1292/* Opcode 0xf3 0x0f 0x38 0xf9 - invalid. */
1293/* Opcode 0xf2 0x0f 0x38 0xf9 - invalid. */
1294
1295/* Opcode 0x0f 0x38 0xfa - invalid. */
1296/* Opcode 0x66 0x0f 0x38 0xfa - invalid. */
1297/* Opcode 0xf3 0x0f 0x38 0xfa - invalid. */
1298/* Opcode 0xf2 0x0f 0x38 0xfa - invalid. */
1299
1300/* Opcode 0x0f 0x38 0xfb - invalid. */
1301/* Opcode 0x66 0x0f 0x38 0xfb - invalid. */
1302/* Opcode 0xf3 0x0f 0x38 0xfb - invalid. */
1303/* Opcode 0xf2 0x0f 0x38 0xfb - invalid. */
1304
1305/* Opcode 0x0f 0x38 0xfc - invalid. */
1306/* Opcode 0x66 0x0f 0x38 0xfc - invalid. */
1307/* Opcode 0xf3 0x0f 0x38 0xfc - invalid. */
1308/* Opcode 0xf2 0x0f 0x38 0xfc - invalid. */
1309
1310/* Opcode 0x0f 0x38 0xfd - invalid. */
1311/* Opcode 0x66 0x0f 0x38 0xfd - invalid. */
1312/* Opcode 0xf3 0x0f 0x38 0xfd - invalid. */
1313/* Opcode 0xf2 0x0f 0x38 0xfd - invalid. */
1314
1315/* Opcode 0x0f 0x38 0xfe - invalid. */
1316/* Opcode 0x66 0x0f 0x38 0xfe - invalid. */
1317/* Opcode 0xf3 0x0f 0x38 0xfe - invalid. */
1318/* Opcode 0xf2 0x0f 0x38 0xfe - invalid. */
1319
1320/* Opcode 0x0f 0x38 0xff - invalid. */
1321/* Opcode 0x66 0x0f 0x38 0xff - invalid. */
1322/* Opcode 0xf3 0x0f 0x38 0xff - invalid. */
1323/* Opcode 0xf2 0x0f 0x38 0xff - invalid. */
1324
1325
1326/**
1327 * Three byte opcode map, first two bytes are 0x0f 0x38.
1328 * @sa g_apfnVexMap2
1329 */
1330IEM_STATIC const PFNIEMOP g_apfnThreeByte0f38[] =
1331{
1332 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
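 /* Each row holds four handlers, indexed by the mandatory prefix of the
    instruction (none, 0x66, 0xf3, 0xf2); IEMOP_X4 simply repeats one handler
    for all four columns. */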
1333 /* 0x00 */ iemOp_pshufb_Pq_Qq, iemOp_pshufb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1334 /* 0x01 */ iemOp_phaddw_Pq_Qq, iemOp_phaddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1335 /* 0x02 */ iemOp_phaddd_Pq_Qq, iemOp_phaddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1336 /* 0x03 */ iemOp_phaddsw_Pq_Qq, iemOp_phaddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1337 /* 0x04 */ iemOp_pmaddubsw_Pq_Qq, iemOp_pmaddubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1338 /* 0x05 */ iemOp_phsubw_Pq_Qq, iemOp_phsubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1339 /* 0x06 */ iemOp_phsubd_Pq_Qq, iemOp_phsubdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1340 /* 0x07 */ iemOp_phsubsw_Pq_Qq, iemOp_phsubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1341 /* 0x08 */ iemOp_psignb_Pq_Qq, iemOp_psignb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1342 /* 0x09 */ iemOp_psignw_Pq_Qq, iemOp_psignw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1343 /* 0x0a */ iemOp_psignd_Pq_Qq, iemOp_psignd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1344 /* 0x0b */ iemOp_pmulhrsw_Pq_Qq, iemOp_pmulhrsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1345 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
1346 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
1347 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
1348 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
1349
1350 /* 0x10 */ iemOp_InvalidNeedRM, iemOp_pblendvb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1351 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRM),
1352 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRM),
1353 /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRM),
1354 /* 0x14 */ iemOp_InvalidNeedRM, iemOp_blendvps_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1355 /* 0x15 */ iemOp_InvalidNeedRM, iemOp_blendvpd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1356 /* 0x16 */ IEMOP_X4(iemOp_InvalidNeedRM),
1357 /* 0x17 */ iemOp_InvalidNeedRM, iemOp_ptest_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1358 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
1359 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
1360 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
1361 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
1362 /* 0x1c */ iemOp_pabsb_Pq_Qq, iemOp_pabsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1363 /* 0x1d */ iemOp_pabsw_Pq_Qq, iemOp_pabsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1364 /* 0x1e */ iemOp_pabsd_Pq_Qq, iemOp_pabsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1365 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
1366
1367 /* 0x20 */ iemOp_InvalidNeedRM, iemOp_pmovsxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1368 /* 0x21 */ iemOp_InvalidNeedRM, iemOp_pmovsxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1369 /* 0x22 */ iemOp_InvalidNeedRM, iemOp_pmovsxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1370 /* 0x23 */ iemOp_InvalidNeedRM, iemOp_pmovsxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1371 /* 0x24 */ iemOp_InvalidNeedRM, iemOp_pmovsxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1372 /* 0x25 */ iemOp_InvalidNeedRM, iemOp_pmovsxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1373 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
1374 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
1375 /* 0x28 */ iemOp_InvalidNeedRM, iemOp_pmuldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1376 /* 0x29 */ iemOp_InvalidNeedRM, iemOp_pcmpeqq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1377 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_movntdqa_Vdq_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1378 /* 0x2b */ iemOp_InvalidNeedRM, iemOp_packusdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1379 /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRM),
1380 /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRM),
1381 /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRM),
1382 /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRM),
1383
1384 /* 0x30 */ iemOp_InvalidNeedRM, iemOp_pmovzxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1385 /* 0x31 */ iemOp_InvalidNeedRM, iemOp_pmovzxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1386 /* 0x32 */ iemOp_InvalidNeedRM, iemOp_pmovzxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1387 /* 0x33 */ iemOp_InvalidNeedRM, iemOp_pmovzxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1388 /* 0x34 */ iemOp_InvalidNeedRM, iemOp_pmovzxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1389 /* 0x35 */ iemOp_InvalidNeedRM, iemOp_pmovzxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1390 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
1391 /* 0x37 */ iemOp_InvalidNeedRM, iemOp_pcmpgtq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1392 /* 0x38 */ iemOp_InvalidNeedRM, iemOp_pminsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1393 /* 0x39 */ iemOp_InvalidNeedRM, iemOp_pminsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1394 /* 0x3a */ iemOp_InvalidNeedRM, iemOp_pminuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1395 /* 0x3b */ iemOp_InvalidNeedRM, iemOp_pminud_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1396 /* 0x3c */ iemOp_InvalidNeedRM, iemOp_pmaxsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1397 /* 0x3d */ iemOp_InvalidNeedRM, iemOp_pmaxsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1398 /* 0x3e */ iemOp_InvalidNeedRM, iemOp_pmaxuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1399 /* 0x3f */ iemOp_InvalidNeedRM, iemOp_pmaxud_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1400
1401 /* 0x40 */ iemOp_InvalidNeedRM, iemOp_pmulld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1402 /* 0x41 */ iemOp_InvalidNeedRM, iemOp_phminposuw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1403 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
1404 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
1405 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
1406 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
1407 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
1408 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
1409 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
1410 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
1411 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
1412 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
1413 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
1414 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
1415 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
1416 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
1417
1418 /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRM),
1419 /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRM),
1420 /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRM),
1421 /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRM),
1422 /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRM),
1423 /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRM),
1424 /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRM),
1425 /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRM),
1426 /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRM),
1427 /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRM),
1428 /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRM),
1429 /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRM),
1430 /* 0x5c */ IEMOP_X4(iemOp_InvalidNeedRM),
1431 /* 0x5d */ IEMOP_X4(iemOp_InvalidNeedRM),
1432 /* 0x5e */ IEMOP_X4(iemOp_InvalidNeedRM),
1433 /* 0x5f */ IEMOP_X4(iemOp_InvalidNeedRM),
1434
1435 /* 0x60 */ IEMOP_X4(iemOp_InvalidNeedRM),
1436 /* 0x61 */ IEMOP_X4(iemOp_InvalidNeedRM),
1437 /* 0x62 */ IEMOP_X4(iemOp_InvalidNeedRM),
1438 /* 0x63 */ IEMOP_X4(iemOp_InvalidNeedRM),
1439 /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRM),
1440 /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRM),
1441 /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRM),
1442 /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRM),
1443 /* 0x68 */ IEMOP_X4(iemOp_InvalidNeedRM),
1444 /* 0x69 */ IEMOP_X4(iemOp_InvalidNeedRM),
1445 /* 0x6a */ IEMOP_X4(iemOp_InvalidNeedRM),
1446 /* 0x6b */ IEMOP_X4(iemOp_InvalidNeedRM),
1447 /* 0x6c */ IEMOP_X4(iemOp_InvalidNeedRM),
1448 /* 0x6d */ IEMOP_X4(iemOp_InvalidNeedRM),
1449 /* 0x6e */ IEMOP_X4(iemOp_InvalidNeedRM),
1450 /* 0x6f */ IEMOP_X4(iemOp_InvalidNeedRM),
1451
1452 /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRM),
1453 /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRM),
1454 /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRM),
1455 /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRM),
1456 /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRM),
1457 /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRM),
1458 /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRM),
1459 /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRM),
1460 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
1461 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
1462 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
1463 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
1464 /* 0x7c */ IEMOP_X4(iemOp_InvalidNeedRM),
1465 /* 0x7d */ IEMOP_X4(iemOp_InvalidNeedRM),
1466 /* 0x7e */ IEMOP_X4(iemOp_InvalidNeedRM),
1467 /* 0x7f */ IEMOP_X4(iemOp_InvalidNeedRM),
1468
1469 /* 0x80 */ iemOp_InvalidNeedRM, iemOp_invept_Gy_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1470 /* 0x81 */ iemOp_InvalidNeedRM, iemOp_invvpid_Gy_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1471 /* 0x82 */ iemOp_InvalidNeedRM, iemOp_invpcid_Gy_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1472 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
1473 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
1474 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
1475 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
1476 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
1477 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
1478 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
1479 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
1480 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
1481 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
1482 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
1483 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
1484 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
1485
1486 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
1487 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
1488 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
1489 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
1490 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
1491 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
1492 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
1493 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
1494 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
1495 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
1496 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
1497 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
1498 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
1499 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
1500 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
1501 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
1502
1503 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1504 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1505 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1506 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1507 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1508 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1509 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1510 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1511 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1512 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1513 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
1514 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
1515 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
1516 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
1517 /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRM),
1518 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
1519
1520 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1521 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1522 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1523 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1524 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1525 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1526 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1527 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1528 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1529 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1530 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
1531 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
1532 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
1533 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
1534 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
1535 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
1536
1537 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1538 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1539 /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1540 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1541 /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1542 /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1543 /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1544 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1545 /* 0xc8 */ iemOp_sha1nexte_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1546 /* 0xc9 */ iemOp_sha1msg1_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1547 /* 0xca */ iemOp_sha1msg2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1548 /* 0xcb */ iemOp_sha256rnds2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1549 /* 0xcc */ iemOp_sha256msg1_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1550 /* 0xcd */ iemOp_sha256msg2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1551 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
1552 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
1553
1554 /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1555 /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1556 /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1557 /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1558 /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1559 /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1560 /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1561 /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1562 /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1563 /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1564 /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRM),
1565 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_aesimc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1566 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_aesenc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1567 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_aesenclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1568 /* 0xde */ iemOp_InvalidNeedRM, iemOp_aesdec_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1569 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_aesdeclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
1570
1571 /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRM),
1572 /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRM),
1573 /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1574 /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1575 /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1576 /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1577 /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRM),
1578 /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1579 /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1580 /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1581 /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRM),
1582 /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRM),
1583 /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRM),
1584 /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRM),
1585 /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRM),
1586 /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRM),
1587
1588 /* 0xf0 */ iemOp_movbe_Gy_My, iemOp_movbe_Gw_Mw, iemOp_InvalidNeedRM, iemOp_crc32_Gd_Eb,
1589 /* 0xf1 */ iemOp_movbe_My_Gy, iemOp_movbe_Mw_Gw, iemOp_InvalidNeedRM, iemOp_crc32_Gv_Ev,
1590 /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRM),
1591 /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRM),
1592 /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRM),
1593 /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRM),
1594 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_adcx_Gy_Ey, iemOp_adox_Gy_Ey, iemOp_InvalidNeedRM,
1595 /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRM),
1596 /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRM),
1597 /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRM),
1598 /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRM),
1599 /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRM),
1600 /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRM),
1601 /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRM),
1602 /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRM),
1603 /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRM),
1604};
1605AssertCompile(RT_ELEMENTS(g_apfnThreeByte0f38) == 1024);
1606
1607/** @} */
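/**
 * Illustrative sketch (not part of the original file): how a dispatcher might
 * consume g_apfnThreeByte0f38.  Each opcode byte owns four consecutive slots,
 * selected by the mandatory prefix in effect (none, 0x66, 0xF3, 0xF2), which
 * is why the table holds 256 * 4 = 1024 entries.  The handler name below is
 * hypothetical, and the idxPrefix field holding the 0..3 prefix index is an
 * assumption for illustration.
 */
FNIEMOP_DEF(iemOp_Sketch_ThreeByteEsc0f38)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* fetch the third opcode byte */
    /* Four slots per opcode byte: index = opcode * 4 + active prefix index. */
    return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}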
1608