VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsThree0f38.cpp.h@ 97356

Last change on this file since 97356 was 97356, checked in by vboxsync, 2 years ago

VMM/IEM: IEM_MC_ADVANCE_RIP -> IEM_MC_ADVANCE_RIP_AND_FINISH in prep for TF and other #DB stuff. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 83.5 KB
Line 
1/* $Id: IEMAllInstructionsThree0f38.cpp.h 97356 2022-10-31 22:36:29Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap2.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name Three byte opcodes with first two bytes 0x0f 0x38
33 * @{
34 */
35
/* Shared MMX worker; defined in IEMAllInstructionsTwoByte0f.cpp.h. */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported);
37
38
/**
 * Common worker for SSSE3 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSSE3 cpuid checks.
 *
 * @param   pfnU128     The 128-bit media worker to invoke on the destination
 *                      and source operands (receives the FXSAVE state, see
 *                      IEM_MC_CALL_SSE_AIMPL_2).
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSsse3_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSSE3_RELATED_XCPT();
        /* Alignment-checked fetch: misaligned operand raises \#GP (exception type 4). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
93
94
/**
 * Common worker for SSE4.1 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.1 cpuid checks.
 *
 * @param   pfnU128     The 128-bit media worker to invoke on the destination
 *                      and source operands (receives the FXSAVE state, see
 *                      IEM_MC_CALL_SSE_AIMPL_2).
 *
 * @sa  iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *      iemOpCommonSse42_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse41_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        /* Alignment-checked fetch: misaligned operand raises \#GP (exception type 4). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
150
151
/**
 * Common worker for SSE4.1 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.1 cpuid checks.
 *
 * Unlike iemOpCommonSse41_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands (invoked via
 * IEM_MC_CALL_VOID_AIMPL_2 rather than IEM_MC_CALL_SSE_AIMPL_2).
 *
 * @param   pfnU128     The 128-bit media worker to invoke on the destination
 *                      and source operands.
 *
 * @sa  iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *      iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse42_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse41Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        /* Alignment-checked fetch: misaligned operand raises \#GP (exception type 4). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
210
211
/**
 * Common worker for SSE4.2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.2 cpuid checks.
 *
 * @param   pfnU128     The 128-bit media worker to invoke on the destination
 *                      and source operands (receives the FXSAVE state, see
 *                      IEM_MC_CALL_SSE_AIMPL_2).
 *
 * @sa  iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *      iemOpCommonSse41_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse42_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE42_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE42_RELATED_XCPT();
        /* Alignment-checked fetch: misaligned operand raises \#GP (exception type 4). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
267
268
/**
 * Common worker for SSE-style AES-NI instructions of the form:
 *      aesxxx  xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. AES-NI cpuid checks.
 *
 * Unlike iemOpCommonSse41_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands (invoked via
 * IEM_MC_CALL_VOID_AIMPL_2).
 *
 * @param   pfnU128     The 128-bit AES worker to invoke on the destination
 *                      and source operands.
 *
 * @sa  iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *      iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse42_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonAesNi_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_AESNI_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_AESNI_RELATED_XCPT();
        /* Alignment-checked fetch: misaligned operand raises \#GP (exception type 4). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
327
328
/** Opcode 0x0f 0x38 0x00 - PSHUFB Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_pshufb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSHUFB, pshufb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pshufb_u64,&iemAImpl_pshufb_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
337
338
/** Opcode 0x66 0x0f 0x38 0x00 - PSHUFB Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_pshufb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSHUFB, pshufb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pshufb_u128, iemAImpl_pshufb_u128_fallback));

}
347
348
/** Opcode 0x0f 0x38 0x01 - PHADDW Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_phaddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDW, phaddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddw_u64,&iemAImpl_phaddw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
357
358
/** Opcode 0x66 0x0f 0x38 0x01 - PHADDW Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_phaddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDW, phaddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddw_u128, iemAImpl_phaddw_u128_fallback));

}
367
368
/** Opcode 0x0f 0x38 0x02 - PHADDD Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_phaddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDD, phaddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddd_u64,&iemAImpl_phaddd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
377
378
/** Opcode 0x66 0x0f 0x38 0x02 - PHADDD Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_phaddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDD, phaddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddd_u128, iemAImpl_phaddd_u128_fallback));

}
387
388
/** Opcode 0x0f 0x38 0x03 - PHADDSW Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_phaddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDSW, phaddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddsw_u64,&iemAImpl_phaddsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
397
398
/** Opcode 0x66 0x0f 0x38 0x03 - PHADDSW Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_phaddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDSW, phaddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddsw_u128, iemAImpl_phaddsw_u128_fallback));

}
407
408
/** Opcode 0x0f 0x38 0x04 - PMADDUBSW Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_pmaddubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDUBSW, pmaddubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmaddubsw_u64, &iemAImpl_pmaddubsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
417
418
/** Opcode 0x66 0x0f 0x38 0x04 - PMADDUBSW Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_pmaddubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDUBSW, pmaddubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmaddubsw_u128, iemAImpl_pmaddubsw_u128_fallback));

}
427
428
/** Opcode 0x0f 0x38 0x05 - PHSUBW Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_phsubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBW, phsubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubw_u64,&iemAImpl_phsubw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
437
438
/** Opcode 0x66 0x0f 0x38 0x05 - PHSUBW Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_phsubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBW, phsubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubw_u128, iemAImpl_phsubw_u128_fallback));

}
447
448
/** Opcode 0x0f 0x38 0x06 - PHSUBD Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_phsubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBD, phsubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubd_u64,&iemAImpl_phsubd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
457
458
459
/** Opcode 0x66 0x0f 0x38 0x06 - PHSUBD Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_phsubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBD, phsubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubd_u128, iemAImpl_phsubd_u128_fallback));

}
468
469
/** Opcode 0x0f 0x38 0x07 - PHSUBSW Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_phsubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBSW, phsubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubsw_u64,&iemAImpl_phsubsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
478
479
/** Opcode 0x66 0x0f 0x38 0x07 - PHSUBSW Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_phsubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBSW, phsubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubsw_u128, iemAImpl_phsubsw_u128_fallback));

}
488
489
/** Opcode 0x0f 0x38 0x08 - PSIGNB Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_psignb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGNB, psignb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignb_u64, &iemAImpl_psignb_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
498
499
/** Opcode 0x66 0x0f 0x38 0x08 - PSIGNB Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_psignb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGNB, psignb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignb_u128, iemAImpl_psignb_u128_fallback));

}
508
509
/** Opcode 0x0f 0x38 0x09 - PSIGNW Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_psignw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGNW, psignw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignw_u64, &iemAImpl_psignw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
518
519
/** Opcode 0x66 0x0f 0x38 0x09 - PSIGNW Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_psignw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGNW, psignw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignw_u128, iemAImpl_psignw_u128_fallback));

}
528
529
/** Opcode 0x0f 0x38 0x0a - PSIGND Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_psignd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGND, psignd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignd_u64, &iemAImpl_psignd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
538
539
/** Opcode 0x66 0x0f 0x38 0x0a - PSIGND Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_psignd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGND, psignd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignd_u128, iemAImpl_psignd_u128_fallback));

}
548
549
/** Opcode 0x0f 0x38 0x0b - PMULHRSW Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_pmulhrsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHRSW, pmulhrsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmulhrsw_u64, &iemAImpl_pmulhrsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
558
559
/** Opcode 0x66 0x0f 0x38 0x0b - PMULHRSW Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_pmulhrsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHRSW, pmulhrsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmulhrsw_u128, iemAImpl_pmulhrsw_u128_fallback));

}
568
569
570/* Opcode 0x0f 0x38 0x0c - invalid. */
571/* Opcode 0x66 0x0f 0x38 0x0c - invalid (vex only). */
572/* Opcode 0x0f 0x38 0x0d - invalid. */
573/* Opcode 0x66 0x0f 0x38 0x0d - invalid (vex only). */
574/* Opcode 0x0f 0x38 0x0e - invalid. */
575/* Opcode 0x66 0x0f 0x38 0x0e - invalid (vex only). */
576/* Opcode 0x0f 0x38 0x0f - invalid. */
577/* Opcode 0x66 0x0f 0x38 0x0f - invalid (vex only). */
578
579
580/* Opcode 0x0f 0x38 0x10 - invalid */
581
582
/**
 * Body for the *blend* instructions (pblendvb, blendvps, blendvpd).
 *
 * The variable mask register is implicitly XMM0 - hence the
 * IEM_MC_REF_XREG_U128_CONST(puMask, 0) below - as per the non-VEX encodings.
 * Proper alignment of the 128-bit memory operand is enforced; SSE4.1 cpuid
 * checks.
 */
#define IEMOP_BODY_P_BLEND_X(a_Instr) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1); \
        IEM_MC_ARG(PCRTUINT128U, puMask, 2); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puMask, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_ ## a_Instr ## _u128_fallback), \
                                 puDst, puSrc, puMask); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_LOCAL(RTUINT128U, uSrc); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1); \
        IEM_MC_ARG(PCRTUINT128U, puMask, 2); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puMask, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_ ## a_Instr ## _u128_fallback), \
                                 puDst, puSrc, puMask); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    return VINF_SUCCESS
634
/** Opcode 0x66 0x0f 0x38 0x10 (legacy only) - PBLENDVB Vdq, Wdq; implicit mask in XMM0. */
FNIEMOP_DEF(iemOp_pblendvb_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, PBLENDVB, pblendvb, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    IEMOP_BODY_P_BLEND_X(pblendvb);
}
641
642
643/* Opcode 0x0f 0x38 0x11 - invalid */
644/* Opcode 0x66 0x0f 0x38 0x11 - invalid */
645/* Opcode 0x0f 0x38 0x12 - invalid */
646/* Opcode 0x66 0x0f 0x38 0x12 - invalid */
647/* Opcode 0x0f 0x38 0x13 - invalid */
648/* Opcode 0x66 0x0f 0x38 0x13 - invalid (vex only). */
649/* Opcode 0x0f 0x38 0x14 - invalid */
650
651
/** Opcode 0x66 0x0f 0x38 0x14 (legacy only) - BLENDVPS Vdq, Wdq; implicit mask in XMM0. */
FNIEMOP_DEF(iemOp_blendvps_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, BLENDVPS, blendvps, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    IEMOP_BODY_P_BLEND_X(blendvps);
}
658
659
660/* Opcode 0x0f 0x38 0x15 - invalid */
661
662
/** Opcode 0x66 0x0f 0x38 0x15 (legacy only) - BLENDVPD Vdq, Wdq; implicit mask in XMM0. */
FNIEMOP_DEF(iemOp_blendvpd_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, BLENDVPD, blendvpd, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    IEMOP_BODY_P_BLEND_X(blendvpd);
}
669
670
671/* Opcode 0x0f 0x38 0x16 - invalid */
672/* Opcode 0x66 0x0f 0x38 0x16 - invalid (vex only). */
673/* Opcode 0x0f 0x38 0x17 - invalid */
674
675
/** Opcode 0x66 0x0f 0x38 0x17 - PTEST Vx, Wx (SSE4.1).
 * Both XMM operands are read-only; the only architectural output is EFLAGS
 * (note the original header comment here wrongly said "invalid"). */
FNIEMOP_DEF(iemOp_ptest_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PTEST, ptest, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc2, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        /* Alignment-checked fetch: misaligned operand raises \#GP (exception type 4). */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
727
728
729/* Opcode 0x0f 0x38 0x18 - invalid */
730/* Opcode 0x66 0x0f 0x38 0x18 - invalid (vex only). */
731/* Opcode 0x0f 0x38 0x19 - invalid */
732/* Opcode 0x66 0x0f 0x38 0x19 - invalid (vex only). */
733/* Opcode 0x0f 0x38 0x1a - invalid */
734/* Opcode 0x66 0x0f 0x38 0x1a - invalid (vex only). */
735/* Opcode 0x0f 0x38 0x1b - invalid */
736/* Opcode 0x66 0x0f 0x38 0x1b - invalid */
737
738
/** Opcode 0x0f 0x38 0x1c - PABSB Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_pabsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSB, pabsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsb_u64, &iemAImpl_pabsb_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
747
748
/** Opcode 0x66 0x0f 0x38 0x1c - PABSB Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_pabsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSB, pabsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsb_u128, iemAImpl_pabsb_u128_fallback));

}
757
758
/** Opcode 0x0f 0x38 0x1d - PABSW Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_pabsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSW, pabsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsw_u64, &iemAImpl_pabsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
767
768
/** Opcode 0x66 0x0f 0x38 0x1d - PABSW Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_pabsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSW, pabsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsw_u128, iemAImpl_pabsw_u128_fallback));

}
777
778
/** Opcode 0x0f 0x38 0x1e - PABSD Pq, Qq (64-bit MMX form; gated on SSSE3). */
FNIEMOP_DEF(iemOp_pabsd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSD, pabsd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsd_u64, &iemAImpl_pabsd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
787
788
/** Opcode 0x66 0x0f 0x38 0x1e - PABSD Vx, Wx (128-bit SSE form; SSSE3). */
FNIEMOP_DEF(iemOp_pabsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSD, pabsd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsd_u128, iemAImpl_pabsd_u128_fallback));

}
797
798
799/* Opcode 0x0f 0x38 0x1f - invalid */
800/* Opcode 0x66 0x0f 0x38 0x1f - invalid */
801
802
/**
 * Body for the pmov{s,z}x* instructions.
 *
 * @param   a_Instr     The instruction (lower case, e.g. pmovsxbw).
 * @param   a_SrcWidth  Width in bits of the memory operand (16, 32 or 64).
 *                      The register form always reads the low 64 bits of the
 *                      source XMM register regardless of this value.
 *
 * @note    The fallback selects the v-prefixed (AVX) worker,
 *          iemAImpl_v<instr>_u128_fallback - it is shared with the VEX
 *          decoding in IEMAllInstructionsVexMap2.cpp.h.
 */
#define IEMOP_BODY_PMOV_S_Z(a_Instr, a_SrcWidth) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(2, 0); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(uint64_t, uSrc, 1); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_v ## a_Instr ## _u128_fallback), \
                                 puDst, uSrc); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        IEM_MC_BEGIN(2, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(uint ## a_SrcWidth ## _t, uSrc, 1); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_MEM_U## a_SrcWidth (uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_v ## a_Instr ## _u128_fallback), \
                                 puDst, uSrc); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    return VINF_SUCCESS
849
850
/** Opcode 0x66 0x0f 0x38 0x20 - PMOVSXBW Vx, Ux/Mq (SSE4.1; 64-bit source). */
FNIEMOP_DEF(iemOp_pmovsxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBW, pmovsxbw, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxbw, 64);
}
858
859
/** Opcode 0x66 0x0f 0x38 0x21 - PMOVSXBD Vx, Ux/Md (SSE4.1; 32-bit memory source). */
FNIEMOP_DEF(iemOp_pmovsxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBD, pmovsxbd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxbd, 32);
}
867
868
/** Opcode 0x66 0x0f 0x38 0x22 - PMOVSXBQ Vx, Ux/Mw (SSE4.1; 16-bit memory source). */
FNIEMOP_DEF(iemOp_pmovsxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBQ, pmovsxbq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxbq, 16);
}
876
877
/** Opcode 0x66 0x0f 0x38 0x23 - PMOVSXWD Vx, Ux/Mq (SSE4.1; 64-bit source). */
FNIEMOP_DEF(iemOp_pmovsxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXWD, pmovsxwd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxwd, 64);
}
885
886
/** Opcode 0x66 0x0f 0x38 0x24 - PMOVSXWQ Vx, Ux/Md (SSE4.1; 32-bit memory source). */
FNIEMOP_DEF(iemOp_pmovsxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXWQ, pmovsxwq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxwq, 32);
}
894
895
/** Opcode 0x66 0x0f 0x38 0x25 - PMOVSXDQ Vx, Ux/Mq (SSE4.1; 64-bit source). */
FNIEMOP_DEF(iemOp_pmovsxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXDQ, pmovsxdq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxdq, 64);
}
903
904
905/* Opcode 0x66 0x0f 0x38 0x26 - invalid */
906/* Opcode 0x66 0x0f 0x38 0x27 - invalid */
907
908
/** Opcode 0x66 0x0f 0x38 0x28 - PMULDQ Vx, Wx (SSE4.1; FXSAVE-less worker). */
FNIEMOP_DEF(iemOp_pmuldq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULDQ, pmuldq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmuldq_u128, iemAImpl_pmuldq_u128_fallback));
}
916
917
/** Opcode 0x66 0x0f 0x38 0x29. */
FNIEMOP_DEF(iemOp_pcmpeqq_Vx_Wx)
{
    /* PCMPEQQ: compare packed qwords for equality (SSE4.1). */
    IEMOP_MNEMONIC2(RM, PCMPEQQ, pcmpeqq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pcmpeqq_u128, iemAImpl_pcmpeqq_u128_fallback));
}
925
926
927/**
928 * @opcode 0x2a
929 * @opcodesub !11 mr/reg
930 * @oppfx 0x66
931 * @opcpuid sse4.1
932 * @opgroup og_sse41_cachect
933 * @opxcpttype 1
934 * @optest op1=-1 op2=2 -> op1=2
935 * @optest op1=0 op2=-42 -> op1=-42
936 */
937FNIEMOP_DEF(iemOp_movntdqa_Vdq_Mdq)
938{
939 IEMOP_MNEMONIC2(RM_MEM, MOVNTDQA, movntdqa, Vdq_WO, Mdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
940 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
941 if (IEM_IS_MODRM_MEM_MODE(bRm))
942 {
943 /* Register, memory. */
944 IEM_MC_BEGIN(0, 2);
945 IEM_MC_LOCAL(RTUINT128U, uSrc);
946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
947
948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
950 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
951 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
952
953 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
954 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
955
956 IEM_MC_ADVANCE_RIP_AND_FINISH();
957 IEM_MC_END();
958 return VINF_SUCCESS;
959 }
960
961 /**
962 * @opdone
963 * @opmnemonic ud660f382areg
964 * @opcode 0x2a
965 * @opcodesub 11 mr/reg
966 * @oppfx 0x66
967 * @opunused immediate
968 * @opcpuid sse
969 * @optest ->
970 */
971 return IEMOP_RAISE_INVALID_OPCODE();
972}
973
974
/** Opcode 0x66 0x0f 0x38 0x2b. */
FNIEMOP_DEF(iemOp_packusdw_Vx_Wx)
{
    /* PACKUSDW: pack signed dwords into words with unsigned saturation (SSE4.1).
       NOTE(review): unlike the sibling SSE4.1 wrappers this passes the
       implementation directly instead of via IEM_SELECT_HOST_OR_FALLBACK --
       presumably iemAImpl_packusdw_u128 is always available; confirm. */
    IEMOP_MNEMONIC2(RM, PACKUSDW, packusdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full, iemAImpl_packusdw_u128);
}
981
982
983/* Opcode 0x66 0x0f 0x38 0x2c - invalid (vex only). */
984/* Opcode 0x66 0x0f 0x38 0x2d - invalid (vex only). */
985/* Opcode 0x66 0x0f 0x38 0x2e - invalid (vex only). */
986/* Opcode 0x66 0x0f 0x38 0x2f - invalid (vex only). */
987
/** Opcode 0x66 0x0f 0x38 0x30. */
FNIEMOP_DEF(iemOp_pmovzxbw_Vx_UxMq)
{
    /* PMOVZXBW: zero-extend the 8 low bytes (64 bits) of the source into 8 words. */
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBW, pmovzxbw, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxbw, 64); /* second arg = source width in bits */
}


/** Opcode 0x66 0x0f 0x38 0x31. */
FNIEMOP_DEF(iemOp_pmovzxbd_Vx_UxMd)
{
    /* PMOVZXBD: zero-extend the 4 low bytes (32 bits) of the source into 4 dwords. */
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBD, pmovzxbd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxbd, 32);
}


/** Opcode 0x66 0x0f 0x38 0x32. */
FNIEMOP_DEF(iemOp_pmovzxbq_Vx_UxMw)
{
    /* PMOVZXBQ: zero-extend the 2 low bytes (16 bits) of the source into 2 qwords. */
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBQ, pmovzxbq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxbq, 16);
}


/** Opcode 0x66 0x0f 0x38 0x33. */
FNIEMOP_DEF(iemOp_pmovzxwd_Vx_UxMq)
{
    /* PMOVZXWD: zero-extend the 4 low words (64 bits) of the source into 4 dwords. */
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXWD, pmovzxwd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxwd, 64);
}


/** Opcode 0x66 0x0f 0x38 0x34. */
FNIEMOP_DEF(iemOp_pmovzxwq_Vx_UxMd)
{
    /* PMOVZXWQ: zero-extend the 2 low words (32 bits) of the source into 2 qwords. */
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXWQ, pmovzxwq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxwq, 32);
}


/** Opcode 0x66 0x0f 0x38 0x35. */
FNIEMOP_DEF(iemOp_pmovzxdq_Vx_UxMq)
{
    /* PMOVZXDQ: zero-extend the 2 low dwords (64 bits) of the source into 2 qwords. */
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXDQ, pmovzxdq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxdq, 64);
}
1040
1041
1042/* Opcode 0x66 0x0f 0x38 0x36 - invalid (vex only). */
1043
1044
/** Opcode 0x66 0x0f 0x38 0x37. */
FNIEMOP_DEF(iemOp_pcmpgtq_Vx_Wx)
{
    /* PCMPGTQ: signed greater-than compare of packed qwords.  Note: this one is
       SSE4.2, not SSE4.1, hence the different common worker and CPUID flag. */
    IEMOP_MNEMONIC2(RM, PCMPGTQ, pcmpgtq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse42_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_pcmpgtq_u128, iemAImpl_pcmpgtq_u128_fallback));
}
1052
1053
/** Opcode 0x66 0x0f 0x38 0x38. */
FNIEMOP_DEF(iemOp_pminsb_Vx_Wx)
{
    /* PMINSB: packed signed byte minimum (SSE4.1). */
    IEMOP_MNEMONIC2(RM, PMINSB, pminsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminsb_u128, iemAImpl_pminsb_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x39. */
FNIEMOP_DEF(iemOp_pminsd_Vx_Wx)
{
    /* PMINSD: packed signed dword minimum (SSE4.1). */
    IEMOP_MNEMONIC2(RM, PMINSD, pminsd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminsd_u128, iemAImpl_pminsd_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3a. */
FNIEMOP_DEF(iemOp_pminuw_Vx_Wx)
{
    /* PMINUW: packed unsigned word minimum (SSE4.1). */
    IEMOP_MNEMONIC2(RM, PMINUW, pminuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminuw_u128, iemAImpl_pminuw_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3b. */
FNIEMOP_DEF(iemOp_pminud_Vx_Wx)
{
    /* PMINUD: packed unsigned dword minimum (SSE4.1). */
    IEMOP_MNEMONIC2(RM, PMINUD, pminud, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminud_u128, iemAImpl_pminud_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3c. */
FNIEMOP_DEF(iemOp_pmaxsb_Vx_Wx)
{
    /* PMAXSB: packed signed byte maximum (SSE4.1). */
    IEMOP_MNEMONIC2(RM, PMAXSB, pmaxsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxsb_u128, iemAImpl_pmaxsb_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3d. */
FNIEMOP_DEF(iemOp_pmaxsd_Vx_Wx)
{
    /* PMAXSD: packed signed dword maximum (SSE4.1). */
    IEMOP_MNEMONIC2(RM, PMAXSD, pmaxsd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxsd_u128, iemAImpl_pmaxsd_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3e. */
FNIEMOP_DEF(iemOp_pmaxuw_Vx_Wx)
{
    /* PMAXUW: packed unsigned word maximum (SSE4.1). */
    IEMOP_MNEMONIC2(RM, PMAXUW, pmaxuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxuw_u128, iemAImpl_pmaxuw_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3f. */
FNIEMOP_DEF(iemOp_pmaxud_Vx_Wx)
{
    /* PMAXUD: packed unsigned dword maximum (SSE4.1). */
    IEMOP_MNEMONIC2(RM, PMAXUD, pmaxud, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxud_u128, iemAImpl_pmaxud_u128_fallback));
}
1124
1125
/** Opcode 0x66 0x0f 0x38 0x40. */
FNIEMOP_DEF(iemOp_pmulld_Vx_Wx)
{
    /* PMULLD: packed dword multiply keeping the low 32 bits of each product (SSE4.1). */
    IEMOP_MNEMONIC2(RM, PMULLD, pmulld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmulld_u128, iemAImpl_pmulld_u128_fallback));
}
1133
1134
/** Opcode 0x66 0x0f 0x38 0x41. */
FNIEMOP_DEF(iemOp_phminposuw_Vdq_Wdq)
{
    /* PHMINPOSUW: horizontal minimum of 8 unsigned words, with its index (SSE4.1). */
    IEMOP_MNEMONIC2(RM, PHMINPOSUW, phminposuw, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_phminposuw_u128, iemAImpl_phminposuw_u128_fallback));
}
1142
1143
1144/* Opcode 0x66 0x0f 0x38 0x42 - invalid. */
1145/* Opcode 0x66 0x0f 0x38 0x43 - invalid. */
1146/* Opcode 0x66 0x0f 0x38 0x44 - invalid. */
1147/* Opcode 0x66 0x0f 0x38 0x45 - invalid (vex only). */
1148/* Opcode 0x66 0x0f 0x38 0x46 - invalid (vex only). */
1149/* Opcode 0x66 0x0f 0x38 0x47 - invalid (vex only). */
1150/* Opcode 0x66 0x0f 0x38 0x48 - invalid. */
1151/* Opcode 0x66 0x0f 0x38 0x49 - invalid. */
1152/* Opcode 0x66 0x0f 0x38 0x4a - invalid. */
1153/* Opcode 0x66 0x0f 0x38 0x4b - invalid. */
1154/* Opcode 0x66 0x0f 0x38 0x4c - invalid. */
1155/* Opcode 0x66 0x0f 0x38 0x4d - invalid. */
1156/* Opcode 0x66 0x0f 0x38 0x4e - invalid. */
1157/* Opcode 0x66 0x0f 0x38 0x4f - invalid. */
1158
1159/* Opcode 0x66 0x0f 0x38 0x50 - invalid. */
1160/* Opcode 0x66 0x0f 0x38 0x51 - invalid. */
1161/* Opcode 0x66 0x0f 0x38 0x52 - invalid. */
1162/* Opcode 0x66 0x0f 0x38 0x53 - invalid. */
1163/* Opcode 0x66 0x0f 0x38 0x54 - invalid. */
1164/* Opcode 0x66 0x0f 0x38 0x55 - invalid. */
1165/* Opcode 0x66 0x0f 0x38 0x56 - invalid. */
1166/* Opcode 0x66 0x0f 0x38 0x57 - invalid. */
1167/* Opcode 0x66 0x0f 0x38 0x58 - invalid (vex only). */
1168/* Opcode 0x66 0x0f 0x38 0x59 - invalid (vex only). */
1169/* Opcode 0x66 0x0f 0x38 0x5a - invalid (vex only). */
1170/* Opcode 0x66 0x0f 0x38 0x5b - invalid. */
1171/* Opcode 0x66 0x0f 0x38 0x5c - invalid. */
1172/* Opcode 0x66 0x0f 0x38 0x5d - invalid. */
1173/* Opcode 0x66 0x0f 0x38 0x5e - invalid. */
1174/* Opcode 0x66 0x0f 0x38 0x5f - invalid. */
1175
1176/* Opcode 0x66 0x0f 0x38 0x60 - invalid. */
1177/* Opcode 0x66 0x0f 0x38 0x61 - invalid. */
1178/* Opcode 0x66 0x0f 0x38 0x62 - invalid. */
1179/* Opcode 0x66 0x0f 0x38 0x63 - invalid. */
1180/* Opcode 0x66 0x0f 0x38 0x64 - invalid. */
1181/* Opcode 0x66 0x0f 0x38 0x65 - invalid. */
1182/* Opcode 0x66 0x0f 0x38 0x66 - invalid. */
1183/* Opcode 0x66 0x0f 0x38 0x67 - invalid. */
1184/* Opcode 0x66 0x0f 0x38 0x68 - invalid. */
1185/* Opcode 0x66 0x0f 0x38 0x69 - invalid. */
1186/* Opcode 0x66 0x0f 0x38 0x6a - invalid. */
1187/* Opcode 0x66 0x0f 0x38 0x6b - invalid. */
1188/* Opcode 0x66 0x0f 0x38 0x6c - invalid. */
1189/* Opcode 0x66 0x0f 0x38 0x6d - invalid. */
1190/* Opcode 0x66 0x0f 0x38 0x6e - invalid. */
1191/* Opcode 0x66 0x0f 0x38 0x6f - invalid. */
1192
1193/* Opcode 0x66 0x0f 0x38 0x70 - invalid. */
1194/* Opcode 0x66 0x0f 0x38 0x71 - invalid. */
1195/* Opcode 0x66 0x0f 0x38 0x72 - invalid. */
1196/* Opcode 0x66 0x0f 0x38 0x73 - invalid. */
1197/* Opcode 0x66 0x0f 0x38 0x74 - invalid. */
1198/* Opcode 0x66 0x0f 0x38 0x75 - invalid. */
1199/* Opcode 0x66 0x0f 0x38 0x76 - invalid. */
1200/* Opcode 0x66 0x0f 0x38 0x77 - invalid. */
1201/* Opcode 0x66 0x0f 0x38 0x78 - invalid (vex only). */
1202/* Opcode 0x66 0x0f 0x38 0x79 - invalid (vex only). */
1203/* Opcode 0x66 0x0f 0x38 0x7a - invalid. */
1204/* Opcode 0x66 0x0f 0x38 0x7b - invalid. */
1205/* Opcode 0x66 0x0f 0x38 0x7c - invalid. */
1206/* Opcode 0x66 0x0f 0x38 0x7d - invalid. */
1207/* Opcode 0x66 0x0f 0x38 0x7e - invalid. */
1208/* Opcode 0x66 0x0f 0x38 0x7f - invalid. */
1209
/** Opcode 0x66 0x0f 0x38 0x80. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX_EPT
FNIEMOP_DEF(iemOp_invept_Gy_Mdq)
{
    /* INVEPT: invalidate EPT-derived translations (VMX).  The register operand
       selects the invalidation type, the mandatory memory operand addresses the
       INVEPT descriptor.  The register r/m form is an invalid encoding (#UD). */
    IEMOP_MNEMONIC(invept, "invept Gy,Mdq");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Must be executed in VMX operation and pass the generic VMX instruction checks. */
    IEMOP_HLP_IN_VMX_OPERATION("invept", kVmxVDiag_Invept);
    IEMOP_HLP_VMX_INSTR("invept", kVmxVDiag_Invept);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory.  64-bit vs 32-bit operand size only changes the
           width of the invalidation-type register fetch. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInveptDesc, 1);
            IEM_MC_ARG(uint64_t, uInveptType, 2);
            IEM_MC_FETCH_GREG_U64(uInveptType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInveptDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invept, iEffSeg, GCPtrInveptDesc, uInveptType);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInveptDesc, 1);
            IEM_MC_ARG(uint32_t, uInveptType, 2);
            IEM_MC_FETCH_GREG_U32(uInveptType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInveptDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invept, iEffSeg, GCPtrInveptDesc, uInveptType);
            IEM_MC_END();
        }
    }
    /* NOTE(review): only the register r/m form is expected to reach this point;
       the memory paths above presumably return inside IEM_MC_CALL_CIMPL_3 /
       IEM_MC_END -- confirm against the IEM_MC macro definitions. */
    Log(("iemOp_invept_Gy_Mdq: invalid encoding -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
#else
FNIEMOP_STUB(iemOp_invept_Gy_Mdq);
#endif
1253
/** Opcode 0x66 0x0f 0x38 0x81. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_invvpid_Gy_Mdq)
{
    /* INVVPID: invalidate VPID-tagged translations (VMX).  Same shape as INVEPT:
       register operand = invalidation type, memory operand = descriptor; the
       register r/m form is an invalid encoding (#UD). */
    IEMOP_MNEMONIC(invvpid, "invvpid Gy,Mdq");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Must be executed in VMX operation and pass the generic VMX instruction checks. */
    IEMOP_HLP_IN_VMX_OPERATION("invvpid", kVmxVDiag_Invvpid);
    IEMOP_HLP_VMX_INSTR("invvpid", kVmxVDiag_Invvpid);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory.  Operand size picks the type-register fetch width. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvvpidDesc, 1);
            IEM_MC_ARG(uint64_t, uInvvpidType, 2);
            IEM_MC_FETCH_GREG_U64(uInvvpidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvvpidDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invvpid, iEffSeg, GCPtrInvvpidDesc, uInvvpidType);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvvpidDesc, 1);
            IEM_MC_ARG(uint32_t, uInvvpidType, 2);
            IEM_MC_FETCH_GREG_U32(uInvvpidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvvpidDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invvpid, iEffSeg, GCPtrInvvpidDesc, uInvvpidType);
            IEM_MC_END();
        }
    }
    /* NOTE(review): only the register r/m form is expected to reach this point;
       the memory paths above presumably return inside IEM_MC_CALL_CIMPL_3 /
       IEM_MC_END -- confirm against the IEM_MC macro definitions. */
    Log(("iemOp_invvpid_Gy_Mdq: invalid encoding -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
#else
FNIEMOP_STUB(iemOp_invvpid_Gy_Mdq);
#endif
1297
/** Opcode 0x66 0x0f 0x38 0x82. */
FNIEMOP_DEF(iemOp_invpcid_Gy_Mdq)
{
    /* INVPCID: invalidate PCID-tagged TLB entries.  Register operand selects
       the invalidation type, memory operand addresses the 128-bit descriptor;
       the register r/m form is an invalid encoding (#UD).  Unlike INVEPT /
       INVVPID this is not a VMX instruction, so no VMX-operation checks. */
    IEMOP_MNEMONIC(invpcid, "invpcid Gy,Mdq");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory.  Operand size picks the type-register fetch width. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvpcidDesc, 1);
            IEM_MC_ARG(uint64_t, uInvpcidType, 2);
            IEM_MC_FETCH_GREG_U64(uInvpcidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvpcidDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invpcid, iEffSeg, GCPtrInvpcidDesc, uInvpcidType);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvpcidDesc, 1);
            IEM_MC_ARG(uint32_t, uInvpcidType, 2);
            IEM_MC_FETCH_GREG_U32(uInvpcidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvpcidDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invpcid, iEffSeg, GCPtrInvpcidDesc, uInvpcidType);
            IEM_MC_END();
        }
    }
    /* NOTE(review): only the register r/m form is expected to reach this point;
       the memory paths above presumably return inside IEM_MC_CALL_CIMPL_3 /
       IEM_MC_END -- confirm against the IEM_MC macro definitions. */
    Log(("iemOp_invpcid_Gy_Mdq: invalid encoding -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1335
1336
1337/* Opcode 0x66 0x0f 0x38 0x83 - invalid. */
1338/* Opcode 0x66 0x0f 0x38 0x84 - invalid. */
1339/* Opcode 0x66 0x0f 0x38 0x85 - invalid. */
1340/* Opcode 0x66 0x0f 0x38 0x86 - invalid. */
1341/* Opcode 0x66 0x0f 0x38 0x87 - invalid. */
1342/* Opcode 0x66 0x0f 0x38 0x88 - invalid. */
1343/* Opcode 0x66 0x0f 0x38 0x89 - invalid. */
1344/* Opcode 0x66 0x0f 0x38 0x8a - invalid. */
1345/* Opcode 0x66 0x0f 0x38 0x8b - invalid. */
1346/* Opcode 0x66 0x0f 0x38 0x8c - invalid (vex only). */
1347/* Opcode 0x66 0x0f 0x38 0x8d - invalid. */
1348/* Opcode 0x66 0x0f 0x38 0x8e - invalid (vex only). */
1349/* Opcode 0x66 0x0f 0x38 0x8f - invalid. */
1350
1351/* Opcode 0x66 0x0f 0x38 0x90 - invalid (vex only). */
1352/* Opcode 0x66 0x0f 0x38 0x91 - invalid (vex only). */
1353/* Opcode 0x66 0x0f 0x38 0x92 - invalid (vex only). */
1354/* Opcode 0x66 0x0f 0x38 0x93 - invalid (vex only). */
1355/* Opcode 0x66 0x0f 0x38 0x94 - invalid. */
1356/* Opcode 0x66 0x0f 0x38 0x95 - invalid. */
1357/* Opcode 0x66 0x0f 0x38 0x96 - invalid (vex only). */
1358/* Opcode 0x66 0x0f 0x38 0x97 - invalid (vex only). */
1359/* Opcode 0x66 0x0f 0x38 0x98 - invalid (vex only). */
1360/* Opcode 0x66 0x0f 0x38 0x99 - invalid (vex only). */
1361/* Opcode 0x66 0x0f 0x38 0x9a - invalid (vex only). */
1362/* Opcode 0x66 0x0f 0x38 0x9b - invalid (vex only). */
1363/* Opcode 0x66 0x0f 0x38 0x9c - invalid (vex only). */
1364/* Opcode 0x66 0x0f 0x38 0x9d - invalid (vex only). */
1365/* Opcode 0x66 0x0f 0x38 0x9e - invalid (vex only). */
1366/* Opcode 0x66 0x0f 0x38 0x9f - invalid (vex only). */
1367
1368/* Opcode 0x66 0x0f 0x38 0xa0 - invalid. */
1369/* Opcode 0x66 0x0f 0x38 0xa1 - invalid. */
1370/* Opcode 0x66 0x0f 0x38 0xa2 - invalid. */
1371/* Opcode 0x66 0x0f 0x38 0xa3 - invalid. */
1372/* Opcode 0x66 0x0f 0x38 0xa4 - invalid. */
1373/* Opcode 0x66 0x0f 0x38 0xa5 - invalid. */
1374/* Opcode 0x66 0x0f 0x38 0xa6 - invalid (vex only). */
1375/* Opcode 0x66 0x0f 0x38 0xa7 - invalid (vex only). */
1376/* Opcode 0x66 0x0f 0x38 0xa8 - invalid (vex only). */
1377/* Opcode 0x66 0x0f 0x38 0xa9 - invalid (vex only). */
1378/* Opcode 0x66 0x0f 0x38 0xaa - invalid (vex only). */
1379/* Opcode 0x66 0x0f 0x38 0xab - invalid (vex only). */
1380/* Opcode 0x66 0x0f 0x38 0xac - invalid (vex only). */
1381/* Opcode 0x66 0x0f 0x38 0xad - invalid (vex only). */
1382/* Opcode 0x66 0x0f 0x38 0xae - invalid (vex only). */
1383/* Opcode 0x66 0x0f 0x38 0xaf - invalid (vex only). */
1384
1385/* Opcode 0x66 0x0f 0x38 0xb0 - invalid. */
1386/* Opcode 0x66 0x0f 0x38 0xb1 - invalid. */
1387/* Opcode 0x66 0x0f 0x38 0xb2 - invalid. */
1388/* Opcode 0x66 0x0f 0x38 0xb3 - invalid. */
1389/* Opcode 0x66 0x0f 0x38 0xb4 - invalid. */
1390/* Opcode 0x66 0x0f 0x38 0xb5 - invalid. */
1391/* Opcode 0x66 0x0f 0x38 0xb6 - invalid (vex only). */
1392/* Opcode 0x66 0x0f 0x38 0xb7 - invalid (vex only). */
1393/* Opcode 0x66 0x0f 0x38 0xb8 - invalid (vex only). */
1394/* Opcode 0x66 0x0f 0x38 0xb9 - invalid (vex only). */
1395/* Opcode 0x66 0x0f 0x38 0xba - invalid (vex only). */
1396/* Opcode 0x66 0x0f 0x38 0xbb - invalid (vex only). */
1397/* Opcode 0x66 0x0f 0x38 0xbc - invalid (vex only). */
1398/* Opcode 0x66 0x0f 0x38 0xbd - invalid (vex only). */
1399/* Opcode 0x66 0x0f 0x38 0xbe - invalid (vex only). */
1400/* Opcode 0x66 0x0f 0x38 0xbf - invalid (vex only). */
1401
1402/* Opcode 0x0f 0x38 0xc0 - invalid. */
1403/* Opcode 0x66 0x0f 0x38 0xc0 - invalid. */
1404/* Opcode 0x0f 0x38 0xc1 - invalid. */
1405/* Opcode 0x66 0x0f 0x38 0xc1 - invalid. */
1406/* Opcode 0x0f 0x38 0xc2 - invalid. */
1407/* Opcode 0x66 0x0f 0x38 0xc2 - invalid. */
1408/* Opcode 0x0f 0x38 0xc3 - invalid. */
1409/* Opcode 0x66 0x0f 0x38 0xc3 - invalid. */
1410/* Opcode 0x0f 0x38 0xc4 - invalid. */
1411/* Opcode 0x66 0x0f 0x38 0xc4 - invalid. */
1412/* Opcode 0x0f 0x38 0xc5 - invalid. */
1413/* Opcode 0x66 0x0f 0x38 0xc5 - invalid. */
1414/* Opcode 0x0f 0x38 0xc6 - invalid. */
1415/* Opcode 0x66 0x0f 0x38 0xc6 - invalid. */
1416/* Opcode 0x0f 0x38 0xc7 - invalid. */
1417/* Opcode 0x66 0x0f 0x38 0xc7 - invalid. */
/*
 * SHA-NI instructions (0x0f 0x38 0xc8..0xcd, no prefix): not implemented yet.
 * FNIEMOP_STUB generates a placeholder decoder body -- see the macro definition
 * for the exact stub behavior.
 */
/** Opcode 0x0f 0x38 0xc8. */
FNIEMOP_STUB(iemOp_sha1nexte_Vdq_Wdq);
/* Opcode 0x66 0x0f 0x38 0xc8 - invalid. */
/** Opcode 0x0f 0x38 0xc9. */
FNIEMOP_STUB(iemOp_sha1msg1_Vdq_Wdq);
/* Opcode 0x66 0x0f 0x38 0xc9 - invalid. */
/** Opcode 0x0f 0x38 0xca. */
FNIEMOP_STUB(iemOp_sha1msg2_Vdq_Wdq);
/* Opcode 0x66 0x0f 0x38 0xca - invalid. */
/** Opcode 0x0f 0x38 0xcb. */
FNIEMOP_STUB(iemOp_sha256rnds2_Vdq_Wdq);
/* Opcode 0x66 0x0f 0x38 0xcb - invalid. */
/** Opcode 0x0f 0x38 0xcc. */
FNIEMOP_STUB(iemOp_sha256msg1_Vdq_Wdq);
/* Opcode 0x66 0x0f 0x38 0xcc - invalid. */
/** Opcode 0x0f 0x38 0xcd. */
FNIEMOP_STUB(iemOp_sha256msg2_Vdq_Wdq);
/* Opcode 0x66 0x0f 0x38 0xcd - invalid. */
1436/* Opcode 0x0f 0x38 0xce - invalid. */
1437/* Opcode 0x66 0x0f 0x38 0xce - invalid. */
1438/* Opcode 0x0f 0x38 0xcf - invalid. */
1439/* Opcode 0x66 0x0f 0x38 0xcf - invalid. */
1440
1441/* Opcode 0x66 0x0f 0x38 0xd0 - invalid. */
1442/* Opcode 0x66 0x0f 0x38 0xd1 - invalid. */
1443/* Opcode 0x66 0x0f 0x38 0xd2 - invalid. */
1444/* Opcode 0x66 0x0f 0x38 0xd3 - invalid. */
1445/* Opcode 0x66 0x0f 0x38 0xd4 - invalid. */
1446/* Opcode 0x66 0x0f 0x38 0xd5 - invalid. */
1447/* Opcode 0x66 0x0f 0x38 0xd6 - invalid. */
1448/* Opcode 0x66 0x0f 0x38 0xd7 - invalid. */
1449/* Opcode 0x66 0x0f 0x38 0xd8 - invalid. */
1450/* Opcode 0x66 0x0f 0x38 0xd9 - invalid. */
1451/* Opcode 0x66 0x0f 0x38 0xda - invalid. */
1452
1453
/** Opcode 0x66 0x0f 0x38 0xdb. */
FNIEMOP_DEF(iemOp_aesimc_Vdq_Wdq)
{
    /* AESIMC: AES InvMixColumns transformation (AES-NI). */
    IEMOP_MNEMONIC2(RM, AESIMC, aesimc, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesimc_u128, iemAImpl_aesimc_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0xdc. */
FNIEMOP_DEF(iemOp_aesenc_Vdq_Wdq)
{
    /* AESENC: one AES encryption round (AES-NI). */
    IEMOP_MNEMONIC2(RM, AESENC, aesenc, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesenc_u128, iemAImpl_aesenc_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0xdd. */
FNIEMOP_DEF(iemOp_aesenclast_Vdq_Wdq)
{
    /* AESENCLAST: final AES encryption round (AES-NI). */
    IEMOP_MNEMONIC2(RM, AESENCLAST, aesenclast, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesenclast_u128, iemAImpl_aesenclast_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0xde. */
FNIEMOP_DEF(iemOp_aesdec_Vdq_Wdq)
{
    /* AESDEC: one AES decryption round (AES-NI). */
    IEMOP_MNEMONIC2(RM, AESDEC, aesdec, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesdec_u128, iemAImpl_aesdec_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0xdf. */
FNIEMOP_DEF(iemOp_aesdeclast_Vdq_Wdq)
{
    /* AESDECLAST: final AES decryption round (AES-NI). */
    IEMOP_MNEMONIC2(RM, AESDECLAST, aesdeclast, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesdeclast_u128, iemAImpl_aesdeclast_u128_fallback));
}
1497
1498
1499/* Opcode 0x66 0x0f 0x38 0xe0 - invalid. */
1500/* Opcode 0x66 0x0f 0x38 0xe1 - invalid. */
1501/* Opcode 0x66 0x0f 0x38 0xe2 - invalid. */
1502/* Opcode 0x66 0x0f 0x38 0xe3 - invalid. */
1503/* Opcode 0x66 0x0f 0x38 0xe4 - invalid. */
1504/* Opcode 0x66 0x0f 0x38 0xe5 - invalid. */
1505/* Opcode 0x66 0x0f 0x38 0xe6 - invalid. */
1506/* Opcode 0x66 0x0f 0x38 0xe7 - invalid. */
1507/* Opcode 0x66 0x0f 0x38 0xe8 - invalid. */
1508/* Opcode 0x66 0x0f 0x38 0xe9 - invalid. */
1509/* Opcode 0x66 0x0f 0x38 0xea - invalid. */
1510/* Opcode 0x66 0x0f 0x38 0xeb - invalid. */
1511/* Opcode 0x66 0x0f 0x38 0xec - invalid. */
1512/* Opcode 0x66 0x0f 0x38 0xed - invalid. */
1513/* Opcode 0x66 0x0f 0x38 0xee - invalid. */
1514/* Opcode 0x66 0x0f 0x38 0xef - invalid. */
1515
1516
/* MOVBE load forms: not implemented yet (stubbed). */
/** Opcode 0x0f 0x38 0xf0. */
FNIEMOP_STUB(iemOp_movbe_Gy_My);
/** Opcode 0x66 0x0f 0x38 0xf0. */
FNIEMOP_STUB(iemOp_movbe_Gw_Mw);
1521/* Opcode 0xf3 0x0f 0x38 0xf0 - invalid. */
1522
1523
/** Opcode 0xf2 0x0f 0x38 0xf0. */
FNIEMOP_DEF(iemOp_crc32_Gd_Eb)
{
    /* CRC32 r32, r/m8: accumulate one byte into the CRC-32C value in the
       destination register (SSE4.2).  Decodes to #UD when the guest CPU
       profile lacks SSE4.2. */
    IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Eb, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
        return iemOp_InvalidNeedRM(pVCpu);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint32_t *, puDst, 0);
        IEM_MC_ARG(uint8_t, uSrc, 1);
        IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
        /* 32-bit GPR write: zap the upper half of the 64-bit register. */
        IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint32_t *, puDst, 0);
        IEM_MC_ARG(uint8_t, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
        /* 32-bit GPR write: zap the upper half of the 64-bit register. */
        IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* NOTE(review): IEM_MC_ADVANCE_RIP_AND_FINISH presumably supplies the return
       status, which may make this redundant -- confirm against IEM_MC_END. */
    return VINF_SUCCESS;
}
1571
1572
/* MOVBE store forms: not implemented yet (stubbed). */
/** Opcode 0x0f 0x38 0xf1. */
FNIEMOP_STUB(iemOp_movbe_My_Gy);
/** Opcode 0x66 0x0f 0x38 0xf1. */
FNIEMOP_STUB(iemOp_movbe_Mw_Gw);
1577/* Opcode 0xf3 0x0f 0x38 0xf1 - invalid. */
1578
1579
/** Opcode 0xf2 0x0f 0x38 0xf1. */
FNIEMOP_DEF(iemOp_crc32_Gv_Ev)
{
    /* CRC32 r32, r/m16|32|64: accumulate a word/dword/qword into the CRC-32C
       value in the destination register (SSE4.2).  The destination is always
       a 32-bit register regardless of operand size; only the source width
       varies, hence the three near-identical cases per addressing mode.
       Decodes to #UD when the guest CPU profile lacks SSE4.2. */
    IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
        return iemOp_InvalidNeedRM(pVCpu);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint16_t, uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
                                         puDst, uSrc);
                /* 32-bit GPR write: zap the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint32_t, uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint64_t, uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint16_t, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                /* Effective address first, then finish decoding, then the load. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint32_t, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint64_t, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1706
1707
1708/* Opcode 0x0f 0x38 0xf2 - invalid (vex only). */
1709/* Opcode 0x66 0x0f 0x38 0xf2 - invalid. */
1710/* Opcode 0xf3 0x0f 0x38 0xf2 - invalid. */
1711/* Opcode 0xf2 0x0f 0x38 0xf2 - invalid. */
1712
1713/* Opcode 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1714/* Opcode 0x66 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1715/* Opcode 0xf3 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1716/* Opcode 0xf2 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1717
1718/* Opcode 0x0f 0x38 0xf4 - invalid. */
1719/* Opcode 0x66 0x0f 0x38 0xf4 - invalid. */
1720/* Opcode 0xf3 0x0f 0x38 0xf4 - invalid. */
1721/* Opcode 0xf2 0x0f 0x38 0xf4 - invalid. */
1722
1723/* Opcode 0x0f 0x38 0xf5 - invalid (vex only). */
1724/* Opcode 0x66 0x0f 0x38 0xf5 - invalid. */
1725/* Opcode 0xf3 0x0f 0x38 0xf5 - invalid (vex only). */
1726/* Opcode 0xf2 0x0f 0x38 0xf5 - invalid (vex only). */
1727
1728/* Opcode 0x0f 0x38 0xf6 - invalid. */
1729/** Opcode 0x66 0x0f 0x38 0xf6. */
1730FNIEMOP_STUB(iemOp_adcx_Gy_Ey);
1731/** Opcode 0xf3 0x0f 0x38 0xf6. */
1732FNIEMOP_STUB(iemOp_adox_Gy_Ey);
1733/* Opcode 0xf2 0x0f 0x38 0xf6 - invalid (vex only). */
1734
1735/* Opcode 0x0f 0x38 0xf7 - invalid (vex only). */
1736/* Opcode 0x66 0x0f 0x38 0xf7 - invalid (vex only). */
1737/* Opcode 0xf3 0x0f 0x38 0xf7 - invalid (vex only). */
1738/* Opcode 0xf2 0x0f 0x38 0xf7 - invalid (vex only). */
1739
1740/* Opcode 0x0f 0x38 0xf8 - invalid. */
1741/* Opcode 0x66 0x0f 0x38 0xf8 - invalid. */
1742/* Opcode 0xf3 0x0f 0x38 0xf8 - invalid. */
1743/* Opcode 0xf2 0x0f 0x38 0xf8 - invalid. */
1744
1745/* Opcode 0x0f 0x38 0xf9 - invalid. */
1746/* Opcode 0x66 0x0f 0x38 0xf9 - invalid. */
1747/* Opcode 0xf3 0x0f 0x38 0xf9 - invalid. */
1748/* Opcode 0xf2 0x0f 0x38 0xf9 - invalid. */
1749
1750/* Opcode 0x0f 0x38 0xfa - invalid. */
1751/* Opcode 0x66 0x0f 0x38 0xfa - invalid. */
1752/* Opcode 0xf3 0x0f 0x38 0xfa - invalid. */
1753/* Opcode 0xf2 0x0f 0x38 0xfa - invalid. */
1754
1755/* Opcode 0x0f 0x38 0xfb - invalid. */
1756/* Opcode 0x66 0x0f 0x38 0xfb - invalid. */
1757/* Opcode 0xf3 0x0f 0x38 0xfb - invalid. */
1758/* Opcode 0xf2 0x0f 0x38 0xfb - invalid. */
1759
1760/* Opcode 0x0f 0x38 0xfc - invalid. */
1761/* Opcode 0x66 0x0f 0x38 0xfc - invalid. */
1762/* Opcode 0xf3 0x0f 0x38 0xfc - invalid. */
1763/* Opcode 0xf2 0x0f 0x38 0xfc - invalid. */
1764
1765/* Opcode 0x0f 0x38 0xfd - invalid. */
1766/* Opcode 0x66 0x0f 0x38 0xfd - invalid. */
1767/* Opcode 0xf3 0x0f 0x38 0xfd - invalid. */
1768/* Opcode 0xf2 0x0f 0x38 0xfd - invalid. */
1769
1770/* Opcode 0x0f 0x38 0xfe - invalid. */
1771/* Opcode 0x66 0x0f 0x38 0xfe - invalid. */
1772/* Opcode 0xf3 0x0f 0x38 0xfe - invalid. */
1773/* Opcode 0xf2 0x0f 0x38 0xfe - invalid. */
1774
1775/* Opcode 0x0f 0x38 0xff - invalid. */
1776/* Opcode 0x66 0x0f 0x38 0xff - invalid. */
1777/* Opcode 0xf3 0x0f 0x38 0xff - invalid. */
1778/* Opcode 0xf2 0x0f 0x38 0xff - invalid. */
1779
1780
/**
 * Three byte opcode map, first two bytes are 0x0f 0x38.
 *
 * The table holds 256 opcodes x 4 mandatory-prefix variants = 1024 entries
 * (see the AssertCompile below).  Each row covers one opcode byte; the four
 * columns are the prefix variants in the order: no prefix, 0x66, 0xf3, 0xf2.
 * Slots that do not decode to an instruction point at iemOp_InvalidNeedRM so
 * the ModR/M byte is still consumed before raising \#UD.
 *
 * @sa g_apfnVexMap2
 */
IEM_STATIC const PFNIEMOP g_apfnThreeByte0f38[] =
{
    /* no prefix,                  066h prefix                 f3h prefix,                 f2h prefix */
    /* 0x00 */ iemOp_pshufb_Pq_Qq,             iemOp_pshufb_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x01 */ iemOp_phaddw_Pq_Qq,             iemOp_phaddw_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x02 */ iemOp_phaddd_Pq_Qq,             iemOp_phaddd_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x03 */ iemOp_phaddsw_Pq_Qq,            iemOp_phaddsw_Vx_Wx,              iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x04 */ iemOp_pmaddubsw_Pq_Qq,          iemOp_pmaddubsw_Vx_Wx,            iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x05 */ iemOp_phsubw_Pq_Qq,             iemOp_phsubw_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x06 */ iemOp_phsubd_Pq_Qq,             iemOp_phsubd_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x07 */ iemOp_phsubsw_Pq_Qq,            iemOp_phsubsw_Vx_Wx,              iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x08 */ iemOp_psignb_Pq_Qq,             iemOp_psignb_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x09 */ iemOp_psignw_Pq_Qq,             iemOp_psignw_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x0a */ iemOp_psignd_Pq_Qq,             iemOp_psignd_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x0b */ iemOp_pmulhrsw_Pq_Qq,           iemOp_pmulhrsw_Vx_Wx,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */ iemOp_InvalidNeedRM,            iemOp_pblendvb_Vdq_Wdq,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x14 */ iemOp_InvalidNeedRM,            iemOp_blendvps_Vdq_Wdq,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_InvalidNeedRM,            iemOp_blendvpd_Vdq_Wdq,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x16 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x17 */ iemOp_InvalidNeedRM,            iemOp_ptest_Vx_Wx,                iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */ iemOp_pabsb_Pq_Qq,              iemOp_pabsb_Vx_Wx,                iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x1d */ iemOp_pabsw_Pq_Qq,              iemOp_pabsw_Vx_Wx,                iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x1e */ iemOp_pabsd_Pq_Qq,              iemOp_pabsd_Vx_Wx,                iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */ iemOp_InvalidNeedRM,            iemOp_pmovsxbw_Vx_UxMq,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x21 */ iemOp_InvalidNeedRM,            iemOp_pmovsxbd_Vx_UxMd,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x22 */ iemOp_InvalidNeedRM,            iemOp_pmovsxbq_Vx_UxMw,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x23 */ iemOp_InvalidNeedRM,            iemOp_pmovsxwd_Vx_UxMq,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x24 */ iemOp_InvalidNeedRM,            iemOp_pmovsxwq_Vx_UxMd,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x25 */ iemOp_InvalidNeedRM,            iemOp_pmovsxdq_Vx_UxMq,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_InvalidNeedRM,            iemOp_pmuldq_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_InvalidNeedRM,            iemOp_pcmpeqq_Vx_Wx,              iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM,            iemOp_movntdqa_Vdq_Mdq,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2b */ iemOp_InvalidNeedRM,            iemOp_packusdw_Vx_Wx,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x30 */ iemOp_InvalidNeedRM,            iemOp_pmovzxbw_Vx_UxMq,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x31 */ iemOp_InvalidNeedRM,            iemOp_pmovzxbd_Vx_UxMd,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x32 */ iemOp_InvalidNeedRM,            iemOp_pmovzxbq_Vx_UxMw,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x33 */ iemOp_InvalidNeedRM,            iemOp_pmovzxwd_Vx_UxMq,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x34 */ iemOp_InvalidNeedRM,            iemOp_pmovzxwq_Vx_UxMd,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x35 */ iemOp_InvalidNeedRM,            iemOp_pmovzxdq_Vx_UxMq,           iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */ iemOp_InvalidNeedRM,            iemOp_pcmpgtq_Vx_Wx,              iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x38 */ iemOp_InvalidNeedRM,            iemOp_pminsb_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x39 */ iemOp_InvalidNeedRM,            iemOp_pminsd_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3a */ iemOp_InvalidNeedRM,            iemOp_pminuw_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3b */ iemOp_InvalidNeedRM,            iemOp_pminud_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3c */ iemOp_InvalidNeedRM,            iemOp_pmaxsb_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3d */ iemOp_InvalidNeedRM,            iemOp_pmaxsd_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3e */ iemOp_InvalidNeedRM,            iemOp_pmaxuw_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3f */ iemOp_InvalidNeedRM,            iemOp_pmaxud_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x40 */ iemOp_InvalidNeedRM,            iemOp_pmulld_Vx_Wx,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x41 */ iemOp_InvalidNeedRM,            iemOp_phminposuw_Vdq_Wdq,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x60 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x61 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x62 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x63 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x68 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x69 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* VMX/PCID invalidation instructions - 066h prefixed only. */
    /* 0x80 */ iemOp_InvalidNeedRM,            iemOp_invept_Gy_Mdq,              iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x81 */ iemOp_InvalidNeedRM,            iemOp_invvpid_Gy_Mdq,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x82 */ iemOp_InvalidNeedRM,            iemOp_invpcid_Gy_Mdq,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* SHA extensions - unprefixed column only. */
    /* 0xc8 */ iemOp_sha1nexte_Vdq_Wdq,        iemOp_InvalidNeedRM,              iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xc9 */ iemOp_sha1msg1_Vdq_Wdq,         iemOp_InvalidNeedRM,              iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xca */ iemOp_sha1msg2_Vdq_Wdq,         iemOp_InvalidNeedRM,              iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xcb */ iemOp_sha256rnds2_Vdq_Wdq,      iemOp_InvalidNeedRM,              iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xcc */ iemOp_sha256msg1_Vdq_Wdq,       iemOp_InvalidNeedRM,              iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xcd */ iemOp_sha256msg2_Vdq_Wdq,       iemOp_InvalidNeedRM,              iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* AES-NI - 066h prefixed column only. */
    /* 0xdb */ iemOp_InvalidNeedRM,            iemOp_aesimc_Vdq_Wdq,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM,            iemOp_aesenc_Vdq_Wdq,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM,            iemOp_aesenclast_Vdq_Wdq,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM,            iemOp_aesdec_Vdq_Wdq,             iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM,            iemOp_aesdeclast_Vdq_Wdq,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* MOVBE shares 0xf0/0xf1 with CRC32 (f2h prefixed). */
    /* 0xf0 */ iemOp_movbe_Gy_My,              iemOp_movbe_Gw_Mw,                iemOp_InvalidNeedRM,        iemOp_crc32_Gd_Eb,
    /* 0xf1 */ iemOp_movbe_My_Gy,              iemOp_movbe_Mw_Gw,                iemOp_InvalidNeedRM,        iemOp_crc32_Gv_Ev,
    /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf6 */ iemOp_InvalidNeedRM,            iemOp_adcx_Gy_Ey,                 iemOp_adox_Gy_Ey,           iemOp_InvalidNeedRM,
    /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRM),
};
/* 256 opcodes, 4 prefix variants each. */
AssertCompile(RT_ELEMENTS(g_apfnThreeByte0f38) == 1024);
2061
2062/** @} */
2063
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette