VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsThree0f38.cpp.h@ 99051

Last change on this file since 99051 was 98827, checked in by vboxsync, 21 months ago

VMM/IEM: Implement adcx/adox instructions emulation, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 97.6 KB
Line 
1/* $Id: IEMAllInstructionsThree0f38.cpp.h 98827 2023-03-03 12:01:42Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap2.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name Three byte opcodes with first two bytes 0x0f 0x38
33 * @{
34 */
35
/** Shared MMX worker gated on a CPUID feature flag (@a fSupported);
 *  defined in IEMAllInstructionsTwoByte0f.cpp.h. */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported);
37
38
/**
 * Common worker for SSSE3 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSSE3 cpuid checks.
 *
 * @param   pfnU128     The 128-bit media worker to invoke (takes the SSE
 *                      state, destination and source operands).
 *
 * @sa      iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSsse3_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The effective address is calculated before decoding is declared done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSSE3_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checking fetch */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
92
93
/**
 * Common worker for SSE4.1 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.1 cpuid checks.
 *
 * @param   pfnU128     The 128-bit media worker to invoke (takes the SSE
 *                      state, destination and source operands).
 *
 * @sa      iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *          iemOpCommonSse42_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse41_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The effective address is calculated before decoding is declared done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checking fetch */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
148
149
/**
 * Common worker for SSE4.1 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.1 cpuid checks.
 *
 * Unlike iemOpCommonSse41_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands (hence IEM_MC_CALL_VOID_AIMPL_2
 * instead of IEM_MC_CALL_SSE_AIMPL_2 below).
 *
 * @param   pfnU128     The 128-bit media worker to invoke (operands only).
 *
 * @sa      iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *          iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse42_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse41Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The effective address is calculated before decoding is declared done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checking fetch */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
207
208
/**
 * Common worker for SSE4.2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.2 cpuid checks.
 *
 * @param   pfnU128     The 128-bit media worker to invoke (takes the SSE
 *                      state, destination and source operands).
 *
 * @sa      iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *          iemOpCommonSse41_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse42_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE42_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The effective address is calculated before decoding is declared done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE42_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checking fetch */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
263
264
/**
 * Common worker for SSE-style AES-NI instructions of the form:
 *      aesxxx  xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. AES-NI cpuid checks.
 *
 * Unlike iemOpCommonSse41_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands (hence IEM_MC_CALL_VOID_AIMPL_2
 * instead of IEM_MC_CALL_SSE_AIMPL_2 below).
 *
 * @param   pfnU128     The 128-bit media worker to invoke (operands only).
 *
 * @sa      iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *          iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse42_FullFull_To_Full,
 *          iemOpCommonSha_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonAesNi_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_AESNI_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The effective address is calculated before decoding is declared done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_AESNI_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checking fetch */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
323
324
/**
 * Common worker for SSE-style SHA instructions of the form:
 *      shaxxx  xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SHA cpuid checks.
 *
 * Unlike iemOpCommonSse41_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands (hence IEM_MC_CALL_VOID_AIMPL_2
 * instead of IEM_MC_CALL_SSE_AIMPL_2 below).
 *
 * @param   pfnU128     The 128-bit media worker to invoke (operands only).
 *
 * @sa      iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *          iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse42_FullFull_To_Full,
 *          iemOpCommonAesNi_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSha_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SHA_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The effective address is calculated before decoding is declared done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SHA_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checking fetch */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
383
384
/** Opcode 0x0f 0x38 0x00 - pshufb Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_pshufb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSHUFB, pshufb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* The guest SSSE3 feature flag is passed to the worker as fSupported. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pshufb_u64,&iemAImpl_pshufb_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
393
394
/** Opcode 0x66 0x0f 0x38 0x00 - pshufb Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_pshufb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSHUFB, pshufb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pshufb_u128, iemAImpl_pshufb_u128_fallback));

}
403
404
/** Opcode 0x0f 0x38 0x01 - phaddw Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_phaddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDW, phaddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddw_u64,&iemAImpl_phaddw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
413
414
/** Opcode 0x66 0x0f 0x38 0x01 - phaddw Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_phaddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDW, phaddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddw_u128, iemAImpl_phaddw_u128_fallback));

}
423
424
/** Opcode 0x0f 0x38 0x02 - phaddd Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_phaddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDD, phaddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddd_u64,&iemAImpl_phaddd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
433
434
/** Opcode 0x66 0x0f 0x38 0x02 - phaddd Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_phaddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDD, phaddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddd_u128, iemAImpl_phaddd_u128_fallback));

}
443
444
/** Opcode 0x0f 0x38 0x03 - phaddsw Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_phaddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDSW, phaddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddsw_u64,&iemAImpl_phaddsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
453
454
/** Opcode 0x66 0x0f 0x38 0x03 - phaddsw Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_phaddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDSW, phaddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddsw_u128, iemAImpl_phaddsw_u128_fallback));

}
463
464
/** Opcode 0x0f 0x38 0x04 - pmaddubsw Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_pmaddubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDUBSW, pmaddubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmaddubsw_u64, &iemAImpl_pmaddubsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
473
474
/** Opcode 0x66 0x0f 0x38 0x04 - pmaddubsw Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_pmaddubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDUBSW, pmaddubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmaddubsw_u128, iemAImpl_pmaddubsw_u128_fallback));

}
483
484
/** Opcode 0x0f 0x38 0x05 - phsubw Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_phsubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBW, phsubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubw_u64,&iemAImpl_phsubw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
493
494
/** Opcode 0x66 0x0f 0x38 0x05 - phsubw Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_phsubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBW, phsubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubw_u128, iemAImpl_phsubw_u128_fallback));

}
503
504
/** Opcode 0x0f 0x38 0x06 - phsubd Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_phsubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBD, phsubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubd_u64,&iemAImpl_phsubd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
513
514
515
/** Opcode 0x66 0x0f 0x38 0x06 - phsubd Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_phsubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBD, phsubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubd_u128, iemAImpl_phsubd_u128_fallback));

}
524
525
/** Opcode 0x0f 0x38 0x07 - phsubsw Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_phsubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBSW, phsubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubsw_u64,&iemAImpl_phsubsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
534
535
/** Opcode 0x66 0x0f 0x38 0x07 - phsubsw Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_phsubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBSW, phsubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubsw_u128, iemAImpl_phsubsw_u128_fallback));

}
544
545
/** Opcode 0x0f 0x38 0x08 - psignb Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_psignb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGNB, psignb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignb_u64, &iemAImpl_psignb_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
554
555
/** Opcode 0x66 0x0f 0x38 0x08 - psignb Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_psignb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGNB, psignb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignb_u128, iemAImpl_psignb_u128_fallback));

}
564
565
/** Opcode 0x0f 0x38 0x09 - psignw Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_psignw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGNW, psignw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignw_u64, &iemAImpl_psignw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
574
575
/** Opcode 0x66 0x0f 0x38 0x09 - psignw Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_psignw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGNW, psignw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignw_u128, iemAImpl_psignw_u128_fallback));

}
584
585
/** Opcode 0x0f 0x38 0x0a - psignd Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_psignd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGND, psignd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignd_u64, &iemAImpl_psignd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
594
595
/** Opcode 0x66 0x0f 0x38 0x0a - psignd Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_psignd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGND, psignd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignd_u128, iemAImpl_psignd_u128_fallback));

}
604
605
/** Opcode 0x0f 0x38 0x0b - pmulhrsw Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_pmulhrsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHRSW, pmulhrsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmulhrsw_u64, &iemAImpl_pmulhrsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
614
615
/** Opcode 0x66 0x0f 0x38 0x0b - pmulhrsw Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_pmulhrsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHRSW, pmulhrsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmulhrsw_u128, iemAImpl_pmulhrsw_u128_fallback));

}
624
625
626/* Opcode 0x0f 0x38 0x0c - invalid. */
627/* Opcode 0x66 0x0f 0x38 0x0c - invalid (vex only). */
628/* Opcode 0x0f 0x38 0x0d - invalid. */
629/* Opcode 0x66 0x0f 0x38 0x0d - invalid (vex only). */
630/* Opcode 0x0f 0x38 0x0e - invalid. */
631/* Opcode 0x66 0x0f 0x38 0x0e - invalid (vex only). */
632/* Opcode 0x0f 0x38 0x0f - invalid. */
633/* Opcode 0x66 0x0f 0x38 0x0f - invalid (vex only). */
634
635
636/* Opcode 0x0f 0x38 0x10 - invalid */
637
638
/** Body for the SSE4.1 variable *blend* instructions (pblendvb, blendvps,
 *  blendvpd).  The blend mask is implicitly taken from XMM0 (register index 0
 *  below); @a a_Instr selects the native/fallback helper pair by name. */
#define IEMOP_BODY_P_BLEND_X(a_Instr) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1); \
        IEM_MC_ARG(PCRTUINT128U, puMask, 2); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puMask, 0); /* implicit XMM0 mask */ \
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_ ## a_Instr ## _u128_fallback), \
                                 puDst, puSrc, puMask); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_LOCAL(RTUINT128U, uSrc); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1); \
        IEM_MC_ARG(PCRTUINT128U, puMask, 2); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checking fetch */ \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puMask, 0); /* implicit XMM0 mask */ \
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_ ## a_Instr ## _u128_fallback), \
                                 puDst, puSrc, puMask); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
690
/** Opcode 0x66 0x0f 0x38 0x10 (legacy only) - pblendvb Vdq, Wdq; implicit
 *  XMM0 mask (see IEMOP_BODY_P_BLEND_X). */
FNIEMOP_DEF(iemOp_pblendvb_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, PBLENDVB, pblendvb, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    IEMOP_BODY_P_BLEND_X(pblendvb);
}
697
698
699/* Opcode 0x0f 0x38 0x11 - invalid */
700/* Opcode 0x66 0x0f 0x38 0x11 - invalid */
701/* Opcode 0x0f 0x38 0x12 - invalid */
702/* Opcode 0x66 0x0f 0x38 0x12 - invalid */
703/* Opcode 0x0f 0x38 0x13 - invalid */
704/* Opcode 0x66 0x0f 0x38 0x13 - invalid (vex only). */
705/* Opcode 0x0f 0x38 0x14 - invalid */
706
707
/** Opcode 0x66 0x0f 0x38 0x14 (legacy only) - blendvps Vdq, Wdq; implicit
 *  XMM0 mask (see IEMOP_BODY_P_BLEND_X). */
FNIEMOP_DEF(iemOp_blendvps_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, BLENDVPS, blendvps, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    IEMOP_BODY_P_BLEND_X(blendvps);
}
714
715
716/* Opcode 0x0f 0x38 0x15 - invalid */
717
718
/** Opcode 0x66 0x0f 0x38 0x15 (legacy only) - blendvpd Vdq, Wdq; implicit
 *  XMM0 mask (see IEMOP_BODY_P_BLEND_X). */
FNIEMOP_DEF(iemOp_blendvpd_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, BLENDVPD, blendvpd, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    IEMOP_BODY_P_BLEND_X(blendvpd);
}
725
726
727/* Opcode 0x0f 0x38 0x16 - invalid */
728/* Opcode 0x66 0x0f 0x38 0x16 - invalid (vex only). */
729/* Opcode 0x0f 0x38 0x17 - invalid */
730
731
/** Opcode 0x66 0x0f 0x38 0x17 - ptest Vx, Wx (SSE4.1).
 *
 * Reads both 128-bit operands and updates EFLAGS only; neither XMM register
 * is written (hence both operand references are const). */
FNIEMOP_DEF(iemOp_ptest_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PTEST, ptest, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc2, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* The effective address is calculated before decoding is declared done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* alignment-checking fetch */

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
782
783
784/* Opcode 0x0f 0x38 0x18 - invalid */
785/* Opcode 0x66 0x0f 0x38 0x18 - invalid (vex only). */
786/* Opcode 0x0f 0x38 0x19 - invalid */
787/* Opcode 0x66 0x0f 0x38 0x19 - invalid (vex only). */
788/* Opcode 0x0f 0x38 0x1a - invalid */
789/* Opcode 0x66 0x0f 0x38 0x1a - invalid (vex only). */
790/* Opcode 0x0f 0x38 0x1b - invalid */
791/* Opcode 0x66 0x0f 0x38 0x1b - invalid */
792
793
/** Opcode 0x0f 0x38 0x1c - pabsb Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_pabsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSB, pabsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsb_u64, &iemAImpl_pabsb_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
802
803
/** Opcode 0x66 0x0f 0x38 0x1c - pabsb Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_pabsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSB, pabsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsb_u128, iemAImpl_pabsb_u128_fallback));

}
812
813
/** Opcode 0x0f 0x38 0x1d - pabsw Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_pabsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSW, pabsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsw_u64, &iemAImpl_pabsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
822
823
/** Opcode 0x66 0x0f 0x38 0x1d - pabsw Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_pabsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSW, pabsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsw_u128, iemAImpl_pabsw_u128_fallback));

}
832
833
/** Opcode 0x0f 0x38 0x1e - pabsd Pq, Qq (MMX form; requires SSSE3). */
FNIEMOP_DEF(iemOp_pabsd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSD, pabsd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsd_u64, &iemAImpl_pabsd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
842
843
/** Opcode 0x66 0x0f 0x38 0x1e - pabsd Vx, Wx (SSE form; requires SSSE3). */
FNIEMOP_DEF(iemOp_pabsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSD, pabsd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsd_u128, iemAImpl_pabsd_u128_fallback));

}
852
853
854/* Opcode 0x0f 0x38 0x1f - invalid */
855/* Opcode 0x66 0x0f 0x38 0x1f - invalid */
856
857
/** Body for the pmov{s,z}x* instructions.
 *
 * @param   a_Instr     Instruction name (e.g. pmovsxbw); forms the helper
 *                      names.  Note that the fallback helper is the v-prefixed
 *                      (VEX) one: iemAImpl_v&lt;a_Instr&gt;_u128_fallback.
 * @param   a_SrcWidth  Memory-operand width in bits (16/32/64).  The register
 *                      form always fetches the full low qword of the source
 *                      XMM register regardless of this width. */
#define IEMOP_BODY_PMOV_S_Z(a_Instr, a_SrcWidth) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(2, 0); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(uint64_t, uSrc, 1); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword */); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_v ## a_Instr ## _u128_fallback), \
                                 puDst, uSrc); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * Register, memory.  Only a_SrcWidth bits are read here. \
         */ \
        IEM_MC_BEGIN(2, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(uint ## a_SrcWidth ## _t, uSrc, 1); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_MEM_U## a_SrcWidth (uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_v ## a_Instr ## _u128_fallback), \
                                 puDst, uSrc); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
904
905
/** Opcode 0x66 0x0f 0x38 0x20 - pmovsxbw Vx, Ux/Mq (SSE4.1); 64-bit memory
 *  operand. */
FNIEMOP_DEF(iemOp_pmovsxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBW, pmovsxbw, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxbw, 64);
}
913
914
/** Opcode 0x66 0x0f 0x38 0x21 - pmovsxbd Vx, Ux/Md (SSE4.1); 32-bit memory
 *  operand. */
FNIEMOP_DEF(iemOp_pmovsxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBD, pmovsxbd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxbd, 32);
}
922
923
/** Opcode 0x66 0x0f 0x38 0x22 - pmovsxbq Vx, Ux/Mw (SSE4.1); 16-bit memory
 *  operand. */
FNIEMOP_DEF(iemOp_pmovsxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBQ, pmovsxbq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxbq, 16);
}
931
932
933/** Opcode 0x66 0x0f 0x38 0x23. */
934FNIEMOP_DEF(iemOp_pmovsxwd_Vx_UxMq)
935{
936 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
937 IEMOP_MNEMONIC2(RM, PMOVSXWD, pmovsxwd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
938 IEMOP_BODY_PMOV_S_Z(pmovsxwd, 64);
939}
940
941
942/** Opcode 0x66 0x0f 0x38 0x24. */
943FNIEMOP_DEF(iemOp_pmovsxwq_Vx_UxMd)
944{
945 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
946 IEMOP_MNEMONIC2(RM, PMOVSXWQ, pmovsxwq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
947 IEMOP_BODY_PMOV_S_Z(pmovsxwq, 32);
948}
949
950
951/** Opcode 0x66 0x0f 0x38 0x25. */
952FNIEMOP_DEF(iemOp_pmovsxdq_Vx_UxMq)
953{
954 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
955 IEMOP_MNEMONIC2(RM, PMOVSXDQ, pmovsxdq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
956 IEMOP_BODY_PMOV_S_Z(pmovsxdq, 64);
957}
958
959
960/* Opcode 0x66 0x0f 0x38 0x26 - invalid */
961/* Opcode 0x66 0x0f 0x38 0x27 - invalid */
962
963
/**
 * Opcode 0x66 0x0f 0x38 0x28 - pmuldq Vx, Wx (SSE4.1).
 *
 * Dispatches to the common SSE4.1 full128/full128 worker, selecting the
 * native implementation or the C fallback depending on host capabilities.
 * NOTE(review): unlike most SSE4.1 siblings in this table the mnemonic lacks
 * DISOPTYPE_SSE - presumably an oversight, confirm before changing.
 */
FNIEMOP_DEF(iemOp_pmuldq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULDQ, pmuldq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmuldq_u128, iemAImpl_pmuldq_u128_fallback));
}


/**
 * Opcode 0x66 0x0f 0x38 0x29 - pcmpeqq Vx, Wx (SSE4.1 packed qword compare
 * for equality).  Host-or-fallback dispatch via the common SSE4.1 worker.
 * NOTE(review): DISOPTYPE_SSE missing here too, same as pmuldq above.
 */
FNIEMOP_DEF(iemOp_pcmpeqq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQQ, pcmpeqq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pcmpeqq_u128, iemAImpl_pcmpeqq_u128_fallback));
}
980
981
982/**
983 * @opcode 0x2a
984 * @opcodesub !11 mr/reg
985 * @oppfx 0x66
986 * @opcpuid sse4.1
987 * @opgroup og_sse41_cachect
988 * @opxcpttype 1
989 * @optest op1=-1 op2=2 -> op1=2
990 * @optest op1=0 op2=-42 -> op1=-42
991 */
992FNIEMOP_DEF(iemOp_movntdqa_Vdq_Mdq)
993{
994 IEMOP_MNEMONIC2(RM_MEM, MOVNTDQA, movntdqa, Vdq_WO, Mdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
996 if (IEM_IS_MODRM_MEM_MODE(bRm))
997 {
998 /* Register, memory. */
999 IEM_MC_BEGIN(0, 2);
1000 IEM_MC_LOCAL(RTUINT128U, uSrc);
1001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1002
1003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1005 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
1006 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1007
1008 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1009 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1010
1011 IEM_MC_ADVANCE_RIP_AND_FINISH();
1012 IEM_MC_END();
1013 }
1014
1015 /**
1016 * @opdone
1017 * @opmnemonic ud660f382areg
1018 * @opcode 0x2a
1019 * @opcodesub 11 mr/reg
1020 * @oppfx 0x66
1021 * @opunused immediate
1022 * @opcpuid sse
1023 * @optest ->
1024 */
1025 else
1026 return IEMOP_RAISE_INVALID_OPCODE();
1027}
1028
1029
/**
 * Opcode 0x66 0x0f 0x38 0x2b - packusdw Vx, Wx (SSE4.1).
 *
 * NOTE(review): unlike the sibling wrappers this passes iemAImpl_packusdw_u128
 * directly instead of going through IEM_SELECT_HOST_OR_FALLBACK - presumably
 * because a portable C implementation exists for all hosts; confirm against
 * IEMAllAImpl* before aligning it with the others.
 */
FNIEMOP_DEF(iemOp_packusdw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKUSDW, packusdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full, iemAImpl_packusdw_u128);
}
1036
1037
1038/* Opcode 0x66 0x0f 0x38 0x2c - invalid (vex only). */
1039/* Opcode 0x66 0x0f 0x38 0x2d - invalid (vex only). */
1040/* Opcode 0x66 0x0f 0x38 0x2e - invalid (vex only). */
1041/* Opcode 0x66 0x0f 0x38 0x2f - invalid (vex only). */
1042
/**
 * Opcode 0x66 0x0f 0x38 0x30 - pmovzxbw Vx, Ux/Mq (SSE4.1).
 *
 * Zero-extend variants of the 0x20..0x25 pmovsx group; same shared
 * IEMOP_BODY_PMOV_S_Z macro body, 2nd argument = memory-source width in bits.
 */
FNIEMOP_DEF(iemOp_pmovzxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBW, pmovzxbw, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxbw, 64);
}


/**
 * Opcode 0x66 0x0f 0x38 0x31 - pmovzxbd Vx, Ux/Md (SSE4.1).
 * Zero-extend packed bytes to packed dwords; 32-bit memory source.
 * NOTE(review): mnemonic says Wq though the memory operand is 32 bits - see @todo.
 */
FNIEMOP_DEF(iemOp_pmovzxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBD, pmovzxbd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxbd, 32);
}


/**
 * Opcode 0x66 0x0f 0x38 0x32 - pmovzxbq Vx, Ux/Mw (SSE4.1).
 * Zero-extend packed bytes to packed qwords; 16-bit memory source.
 * NOTE(review): mnemonic says Wq though the memory operand is 16 bits - see @todo.
 */
FNIEMOP_DEF(iemOp_pmovzxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBQ, pmovzxbq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxbq, 16);
}


/**
 * Opcode 0x66 0x0f 0x38 0x33 - pmovzxwd Vx, Ux/Mq (SSE4.1).
 * Zero-extend packed words to packed dwords; 64-bit memory source.
 */
FNIEMOP_DEF(iemOp_pmovzxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXWD, pmovzxwd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxwd, 64);
}


/**
 * Opcode 0x66 0x0f 0x38 0x34 - pmovzxwq Vx, Ux/Md (SSE4.1).
 * Zero-extend packed words to packed qwords; 32-bit memory source.
 */
FNIEMOP_DEF(iemOp_pmovzxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXWQ, pmovzxwq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxwq, 32);
}


/**
 * Opcode 0x66 0x0f 0x38 0x35 - pmovzxdq Vx, Ux/Mq (SSE4.1).
 * Zero-extend packed dwords to packed qwords; 64-bit memory source.
 */
FNIEMOP_DEF(iemOp_pmovzxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXDQ, pmovzxdq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxdq, 64);
}
1095
1096
1097/* Opcode 0x66 0x0f 0x38 0x36 - invalid (vex only). */
1098
1099
/**
 * Opcode 0x66 0x0f 0x38 0x37 - pcmpgtq Vx, Wx.
 *
 * Packed signed qword greater-than compare; note this one is SSE4.2 (not
 * SSE4.1 like its neighbours), hence the fSse42 selection and the SSE4.2
 * common worker.
 */
FNIEMOP_DEF(iemOp_pcmpgtq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTQ, pcmpgtq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse42_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_pcmpgtq_u128, iemAImpl_pcmpgtq_u128_fallback));
}
1107
1108
/**
 * Opcode 0x66 0x0f 0x38 0x38 - pminsb Vx, Wx (SSE4.1 packed signed byte min).
 *
 * This and the seven wrappers below (0x38..0x3f) form the SSE4.1 packed
 * min/max family: all dispatch through iemOpCommonSse41_FullFull_To_Full
 * with host-or-fallback implementation selection.
 */
FNIEMOP_DEF(iemOp_pminsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSB, pminsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminsb_u128, iemAImpl_pminsb_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x39 - pminsd Vx, Wx (SSE4.1 packed signed dword min). */
FNIEMOP_DEF(iemOp_pminsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSD, pminsd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminsd_u128, iemAImpl_pminsd_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3a - pminuw Vx, Wx (SSE4.1 packed unsigned word min). */
FNIEMOP_DEF(iemOp_pminuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUW, pminuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminuw_u128, iemAImpl_pminuw_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3b - pminud Vx, Wx (SSE4.1 packed unsigned dword min). */
FNIEMOP_DEF(iemOp_pminud_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUD, pminud, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminud_u128, iemAImpl_pminud_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3c - pmaxsb Vx, Wx (SSE4.1 packed signed byte max). */
FNIEMOP_DEF(iemOp_pmaxsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSB, pmaxsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxsb_u128, iemAImpl_pmaxsb_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3d - pmaxsd Vx, Wx (SSE4.1 packed signed dword max). */
FNIEMOP_DEF(iemOp_pmaxsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSD, pmaxsd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxsd_u128, iemAImpl_pmaxsd_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3e - pmaxuw Vx, Wx (SSE4.1 packed unsigned word max). */
FNIEMOP_DEF(iemOp_pmaxuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUW, pmaxuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxuw_u128, iemAImpl_pmaxuw_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3f - pmaxud Vx, Wx (SSE4.1 packed unsigned dword max). */
FNIEMOP_DEF(iemOp_pmaxud_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUD, pmaxud, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxud_u128, iemAImpl_pmaxud_u128_fallback));
}
1179
1180
/** Opcode 0x66 0x0f 0x38 0x40 - pmulld Vx, Wx (SSE4.1 packed dword multiply, low result). */
FNIEMOP_DEF(iemOp_pmulld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULLD, pmulld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmulld_u128, iemAImpl_pmulld_u128_fallback));
}


/**
 * Opcode 0x66 0x0f 0x38 0x41 - phminposuw Vdq, Wdq (SSE4.1 horizontal
 * unsigned word minimum + position).  Uses the "Opt" worker variant, same
 * host-or-fallback dispatch as the rest of the table.
 */
FNIEMOP_DEF(iemOp_phminposuw_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, PHMINPOSUW, phminposuw, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_phminposuw_u128, iemAImpl_phminposuw_u128_fallback));
}
1197
1198
1199/* Opcode 0x66 0x0f 0x38 0x42 - invalid. */
1200/* Opcode 0x66 0x0f 0x38 0x43 - invalid. */
1201/* Opcode 0x66 0x0f 0x38 0x44 - invalid. */
1202/* Opcode 0x66 0x0f 0x38 0x45 - invalid (vex only). */
1203/* Opcode 0x66 0x0f 0x38 0x46 - invalid (vex only). */
1204/* Opcode 0x66 0x0f 0x38 0x47 - invalid (vex only). */
1205/* Opcode 0x66 0x0f 0x38 0x48 - invalid. */
1206/* Opcode 0x66 0x0f 0x38 0x49 - invalid. */
1207/* Opcode 0x66 0x0f 0x38 0x4a - invalid. */
1208/* Opcode 0x66 0x0f 0x38 0x4b - invalid. */
1209/* Opcode 0x66 0x0f 0x38 0x4c - invalid. */
1210/* Opcode 0x66 0x0f 0x38 0x4d - invalid. */
1211/* Opcode 0x66 0x0f 0x38 0x4e - invalid. */
1212/* Opcode 0x66 0x0f 0x38 0x4f - invalid. */
1213
1214/* Opcode 0x66 0x0f 0x38 0x50 - invalid. */
1215/* Opcode 0x66 0x0f 0x38 0x51 - invalid. */
1216/* Opcode 0x66 0x0f 0x38 0x52 - invalid. */
1217/* Opcode 0x66 0x0f 0x38 0x53 - invalid. */
1218/* Opcode 0x66 0x0f 0x38 0x54 - invalid. */
1219/* Opcode 0x66 0x0f 0x38 0x55 - invalid. */
1220/* Opcode 0x66 0x0f 0x38 0x56 - invalid. */
1221/* Opcode 0x66 0x0f 0x38 0x57 - invalid. */
1222/* Opcode 0x66 0x0f 0x38 0x58 - invalid (vex only). */
1223/* Opcode 0x66 0x0f 0x38 0x59 - invalid (vex only). */
1224/* Opcode 0x66 0x0f 0x38 0x5a - invalid (vex only). */
1225/* Opcode 0x66 0x0f 0x38 0x5b - invalid. */
1226/* Opcode 0x66 0x0f 0x38 0x5c - invalid. */
1227/* Opcode 0x66 0x0f 0x38 0x5d - invalid. */
1228/* Opcode 0x66 0x0f 0x38 0x5e - invalid. */
1229/* Opcode 0x66 0x0f 0x38 0x5f - invalid. */
1230
1231/* Opcode 0x66 0x0f 0x38 0x60 - invalid. */
1232/* Opcode 0x66 0x0f 0x38 0x61 - invalid. */
1233/* Opcode 0x66 0x0f 0x38 0x62 - invalid. */
1234/* Opcode 0x66 0x0f 0x38 0x63 - invalid. */
1235/* Opcode 0x66 0x0f 0x38 0x64 - invalid. */
1236/* Opcode 0x66 0x0f 0x38 0x65 - invalid. */
1237/* Opcode 0x66 0x0f 0x38 0x66 - invalid. */
1238/* Opcode 0x66 0x0f 0x38 0x67 - invalid. */
1239/* Opcode 0x66 0x0f 0x38 0x68 - invalid. */
1240/* Opcode 0x66 0x0f 0x38 0x69 - invalid. */
1241/* Opcode 0x66 0x0f 0x38 0x6a - invalid. */
1242/* Opcode 0x66 0x0f 0x38 0x6b - invalid. */
1243/* Opcode 0x66 0x0f 0x38 0x6c - invalid. */
1244/* Opcode 0x66 0x0f 0x38 0x6d - invalid. */
1245/* Opcode 0x66 0x0f 0x38 0x6e - invalid. */
1246/* Opcode 0x66 0x0f 0x38 0x6f - invalid. */
1247
1248/* Opcode 0x66 0x0f 0x38 0x70 - invalid. */
1249/* Opcode 0x66 0x0f 0x38 0x71 - invalid. */
1250/* Opcode 0x66 0x0f 0x38 0x72 - invalid. */
1251/* Opcode 0x66 0x0f 0x38 0x73 - invalid. */
1252/* Opcode 0x66 0x0f 0x38 0x74 - invalid. */
1253/* Opcode 0x66 0x0f 0x38 0x75 - invalid. */
1254/* Opcode 0x66 0x0f 0x38 0x76 - invalid. */
1255/* Opcode 0x66 0x0f 0x38 0x77 - invalid. */
1256/* Opcode 0x66 0x0f 0x38 0x78 - invalid (vex only). */
1257/* Opcode 0x66 0x0f 0x38 0x79 - invalid (vex only). */
1258/* Opcode 0x66 0x0f 0x38 0x7a - invalid. */
1259/* Opcode 0x66 0x0f 0x38 0x7b - invalid. */
1260/* Opcode 0x66 0x0f 0x38 0x7c - invalid. */
1261/* Opcode 0x66 0x0f 0x38 0x7d - invalid. */
1262/* Opcode 0x66 0x0f 0x38 0x7e - invalid. */
1263/* Opcode 0x66 0x0f 0x38 0x7f - invalid. */
1264
/**
 * Opcode 0x66 0x0f 0x38 0x80 - invept Gy, Mdq (VMX).
 *
 * Invalidate EPT translations.  Decoding only: the register operand supplies
 * the invalidation type (32 or 64 bits depending on effective operand size),
 * the memory operand the 128-bit INVEPT descriptor; the real work happens in
 * iemCImpl_invept.  Only compiled when nested VMX EPT support is enabled,
 * otherwise stubbed.
 */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX_EPT
FNIEMOP_DEF(iemOp_invept_Gy_Mdq)
{
    IEMOP_MNEMONIC(invept, "invept Gy,Mdq");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_IN_VMX_OPERATION("invept", kVmxVDiag_Invept);
    IEMOP_HLP_VMX_INSTR("invept", kVmxVDiag_Invept);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
        {
            /* 64-bit invalidation type from the ModRM register operand. */
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInveptDesc, 1);
            IEM_MC_ARG(uint64_t, uInveptType, 2);
            IEM_MC_FETCH_GREG_U64(uInveptType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInveptDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invept, iEffSeg, GCPtrInveptDesc, uInveptType);
            IEM_MC_END();
        }
        else
        {
            /* 16/32-bit operand size: 32-bit invalidation type. */
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInveptDesc, 1);
            IEM_MC_ARG(uint32_t, uInveptType, 2);
            IEM_MC_FETCH_GREG_U32(uInveptType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInveptDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invept, iEffSeg, GCPtrInveptDesc, uInveptType);
            IEM_MC_END();
        }
    }
    /* NOTE(review): the memory forms above exit via IEM_MC_CALL_CIMPL_3 /
       IEM_MC_END, so only the register-form encoding falls through to the
       #UD below - confirm against the IEM_MC_* macro definitions. */
    Log(("iemOp_invept_Gy_Mdq: invalid encoding -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
#else
FNIEMOP_STUB(iemOp_invept_Gy_Mdq);
#endif
1308
/**
 * Opcode 0x66 0x0f 0x38 0x81 - invvpid Gy, Mdq (VMX).
 *
 * Invalidate VPID-tagged translations.  Same decode structure as invept:
 * register operand = invalidation type (32/64-bit by operand size), memory
 * operand = 128-bit descriptor, work done in iemCImpl_invvpid.  Stubbed when
 * nested VMX support is not compiled in.
 */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_invvpid_Gy_Mdq)
{
    IEMOP_MNEMONIC(invvpid, "invvpid Gy,Mdq");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_IN_VMX_OPERATION("invvpid", kVmxVDiag_Invvpid);
    IEMOP_HLP_VMX_INSTR("invvpid", kVmxVDiag_Invvpid);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
        {
            /* 64-bit invalidation type. */
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvvpidDesc, 1);
            IEM_MC_ARG(uint64_t, uInvvpidType, 2);
            IEM_MC_FETCH_GREG_U64(uInvvpidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvvpidDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invvpid, iEffSeg, GCPtrInvvpidDesc, uInvvpidType);
            IEM_MC_END();
        }
        else
        {
            /* 16/32-bit operand size: 32-bit invalidation type. */
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvvpidDesc, 1);
            IEM_MC_ARG(uint32_t, uInvvpidType, 2);
            IEM_MC_FETCH_GREG_U32(uInvvpidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvvpidDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invvpid, iEffSeg, GCPtrInvvpidDesc, uInvvpidType);
            IEM_MC_END();
        }
    }
    /* NOTE(review): memory forms return via the CIMPL call above; only the
       register form reaches this #UD path - confirm against IEM_MC_* macros. */
    Log(("iemOp_invvpid_Gy_Mdq: invalid encoding -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
#else
FNIEMOP_STUB(iemOp_invvpid_Gy_Mdq);
#endif
1352
/**
 * Opcode 0x66 0x0f 0x38 0x82 - invpcid Gy, Mdq.
 *
 * Invalidate PCID-tagged TLB entries.  Register operand = invalidation type
 * (32/64-bit by effective operand size), memory operand = 128-bit descriptor;
 * the actual invalidation and privilege/feature checks live in
 * iemCImpl_invpcid.  Unlike invept/invvpid this is always compiled in.
 */
FNIEMOP_DEF(iemOp_invpcid_Gy_Mdq)
{
    IEMOP_MNEMONIC(invpcid, "invpcid Gy,Mdq");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
        {
            /* 64-bit invalidation type. */
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvpcidDesc, 1);
            IEM_MC_ARG(uint64_t, uInvpcidType, 2);
            IEM_MC_FETCH_GREG_U64(uInvpcidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvpcidDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invpcid, iEffSeg, GCPtrInvpcidDesc, uInvpcidType);
            IEM_MC_END();
        }
        else
        {
            /* 16/32-bit operand size: 32-bit invalidation type. */
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvpcidDesc, 1);
            IEM_MC_ARG(uint32_t, uInvpcidType, 2);
            IEM_MC_FETCH_GREG_U32(uInvpcidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvpcidDesc, bRm, 0);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(iemCImpl_invpcid, iEffSeg, GCPtrInvpcidDesc, uInvpcidType);
            IEM_MC_END();
        }
    }
    /* NOTE(review): memory forms return via the CIMPL call above; only the
       register form reaches this #UD path - confirm against IEM_MC_* macros. */
    Log(("iemOp_invpcid_Gy_Mdq: invalid encoding -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1390
1391
1392/* Opcode 0x66 0x0f 0x38 0x83 - invalid. */
1393/* Opcode 0x66 0x0f 0x38 0x84 - invalid. */
1394/* Opcode 0x66 0x0f 0x38 0x85 - invalid. */
1395/* Opcode 0x66 0x0f 0x38 0x86 - invalid. */
1396/* Opcode 0x66 0x0f 0x38 0x87 - invalid. */
1397/* Opcode 0x66 0x0f 0x38 0x88 - invalid. */
1398/* Opcode 0x66 0x0f 0x38 0x89 - invalid. */
1399/* Opcode 0x66 0x0f 0x38 0x8a - invalid. */
1400/* Opcode 0x66 0x0f 0x38 0x8b - invalid. */
1401/* Opcode 0x66 0x0f 0x38 0x8c - invalid (vex only). */
1402/* Opcode 0x66 0x0f 0x38 0x8d - invalid. */
1403/* Opcode 0x66 0x0f 0x38 0x8e - invalid (vex only). */
1404/* Opcode 0x66 0x0f 0x38 0x8f - invalid. */
1405
1406/* Opcode 0x66 0x0f 0x38 0x90 - invalid (vex only). */
1407/* Opcode 0x66 0x0f 0x38 0x91 - invalid (vex only). */
1408/* Opcode 0x66 0x0f 0x38 0x92 - invalid (vex only). */
1409/* Opcode 0x66 0x0f 0x38 0x93 - invalid (vex only). */
1410/* Opcode 0x66 0x0f 0x38 0x94 - invalid. */
1411/* Opcode 0x66 0x0f 0x38 0x95 - invalid. */
1412/* Opcode 0x66 0x0f 0x38 0x96 - invalid (vex only). */
1413/* Opcode 0x66 0x0f 0x38 0x97 - invalid (vex only). */
1414/* Opcode 0x66 0x0f 0x38 0x98 - invalid (vex only). */
1415/* Opcode 0x66 0x0f 0x38 0x99 - invalid (vex only). */
1416/* Opcode 0x66 0x0f 0x38 0x9a - invalid (vex only). */
1417/* Opcode 0x66 0x0f 0x38 0x9b - invalid (vex only). */
1418/* Opcode 0x66 0x0f 0x38 0x9c - invalid (vex only). */
1419/* Opcode 0x66 0x0f 0x38 0x9d - invalid (vex only). */
1420/* Opcode 0x66 0x0f 0x38 0x9e - invalid (vex only). */
1421/* Opcode 0x66 0x0f 0x38 0x9f - invalid (vex only). */
1422
1423/* Opcode 0x66 0x0f 0x38 0xa0 - invalid. */
1424/* Opcode 0x66 0x0f 0x38 0xa1 - invalid. */
1425/* Opcode 0x66 0x0f 0x38 0xa2 - invalid. */
1426/* Opcode 0x66 0x0f 0x38 0xa3 - invalid. */
1427/* Opcode 0x66 0x0f 0x38 0xa4 - invalid. */
1428/* Opcode 0x66 0x0f 0x38 0xa5 - invalid. */
1429/* Opcode 0x66 0x0f 0x38 0xa6 - invalid (vex only). */
1430/* Opcode 0x66 0x0f 0x38 0xa7 - invalid (vex only). */
1431/* Opcode 0x66 0x0f 0x38 0xa8 - invalid (vex only). */
1432/* Opcode 0x66 0x0f 0x38 0xa9 - invalid (vex only). */
1433/* Opcode 0x66 0x0f 0x38 0xaa - invalid (vex only). */
1434/* Opcode 0x66 0x0f 0x38 0xab - invalid (vex only). */
1435/* Opcode 0x66 0x0f 0x38 0xac - invalid (vex only). */
1436/* Opcode 0x66 0x0f 0x38 0xad - invalid (vex only). */
1437/* Opcode 0x66 0x0f 0x38 0xae - invalid (vex only). */
1438/* Opcode 0x66 0x0f 0x38 0xaf - invalid (vex only). */
1439
1440/* Opcode 0x66 0x0f 0x38 0xb0 - invalid. */
1441/* Opcode 0x66 0x0f 0x38 0xb1 - invalid. */
1442/* Opcode 0x66 0x0f 0x38 0xb2 - invalid. */
1443/* Opcode 0x66 0x0f 0x38 0xb3 - invalid. */
1444/* Opcode 0x66 0x0f 0x38 0xb4 - invalid. */
1445/* Opcode 0x66 0x0f 0x38 0xb5 - invalid. */
1446/* Opcode 0x66 0x0f 0x38 0xb6 - invalid (vex only). */
1447/* Opcode 0x66 0x0f 0x38 0xb7 - invalid (vex only). */
1448/* Opcode 0x66 0x0f 0x38 0xb8 - invalid (vex only). */
1449/* Opcode 0x66 0x0f 0x38 0xb9 - invalid (vex only). */
1450/* Opcode 0x66 0x0f 0x38 0xba - invalid (vex only). */
1451/* Opcode 0x66 0x0f 0x38 0xbb - invalid (vex only). */
1452/* Opcode 0x66 0x0f 0x38 0xbc - invalid (vex only). */
1453/* Opcode 0x66 0x0f 0x38 0xbd - invalid (vex only). */
1454/* Opcode 0x66 0x0f 0x38 0xbe - invalid (vex only). */
1455/* Opcode 0x66 0x0f 0x38 0xbf - invalid (vex only). */
1456
1457/* Opcode 0x0f 0x38 0xc0 - invalid. */
1458/* Opcode 0x66 0x0f 0x38 0xc0 - invalid. */
1459/* Opcode 0x0f 0x38 0xc1 - invalid. */
1460/* Opcode 0x66 0x0f 0x38 0xc1 - invalid. */
1461/* Opcode 0x0f 0x38 0xc2 - invalid. */
1462/* Opcode 0x66 0x0f 0x38 0xc2 - invalid. */
1463/* Opcode 0x0f 0x38 0xc3 - invalid. */
1464/* Opcode 0x66 0x0f 0x38 0xc3 - invalid. */
1465/* Opcode 0x0f 0x38 0xc4 - invalid. */
1466/* Opcode 0x66 0x0f 0x38 0xc4 - invalid. */
1467/* Opcode 0x0f 0x38 0xc5 - invalid. */
1468/* Opcode 0x66 0x0f 0x38 0xc5 - invalid. */
1469/* Opcode 0x0f 0x38 0xc6 - invalid. */
1470/* Opcode 0x66 0x0f 0x38 0xc6 - invalid. */
1471/* Opcode 0x0f 0x38 0xc7 - invalid. */
1472/* Opcode 0x66 0x0f 0x38 0xc7 - invalid. */
1473
1474
/**
 * Opcode 0x0f 0x38 0xc8 - sha1nexte Vdq, Wdq (SHA extensions).
 *
 * Note: no 0x66 prefix for the SHA group; dispatch goes through the common
 * SHA full128/full128 worker with host-or-fallback implementation selection.
 */
FNIEMOP_DEF(iemOp_sha1nexte_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, SHA1NEXTE, sha1nexte, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSha_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSha, iemAImpl_sha1nexte_u128, iemAImpl_sha1nexte_u128_fallback));
}


/* Opcode 0x66 0x0f 0x38 0xc8 - invalid. */


/** Opcode 0x0f 0x38 0xc9 - sha1msg1 Vdq, Wdq (SHA extensions). */
FNIEMOP_DEF(iemOp_sha1msg1_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, SHA1MSG1, sha1msg1, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSha_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSha, iemAImpl_sha1msg1_u128, iemAImpl_sha1msg1_u128_fallback));
}


/* Opcode 0x66 0x0f 0x38 0xc9 - invalid. */


/** Opcode 0x0f 0x38 0xca - sha1msg2 Vdq, Wdq (SHA extensions). */
FNIEMOP_DEF(iemOp_sha1msg2_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, SHA1MSG2, sha1msg2, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSha_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSha, iemAImpl_sha1msg2_u128, iemAImpl_sha1msg2_u128_fallback));
}
1506
1507
1508/* Opcode 0x66 0x0f 0x38 0xca - invalid. */
1509
1510
/**
 * Opcode 0x0f 0x38 0xcb - sha256rnds2 Vdq, Wdq.
 *
 * Cannot use the common SHA worker because the instruction has an implicit
 * third operand: XMM0 is always read (referenced as a const third argument
 * via IEM_MC_REF_XREG_U128_CONST(puXmm0, 0)).
 */
FNIEMOP_DEF(iemOp_sha256rnds2_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, SHA256RNDS2, sha256rnds2, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo Actually RMI with implicit XMM0 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG(PCRTUINT128U, puXmm0, 2);
        IEM_MC_MAYBE_RAISE_SHA_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puXmm0, 0); /* implicit XMM0 operand */
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSha, iemAImpl_sha256rnds2_u128, iemAImpl_sha256rnds2_u128_fallback),
                                 puDst, puSrc, puXmm0);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_ARG(PCRTUINT128U, puXmm0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SHA_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puXmm0, 0); /* implicit XMM0 operand */
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSha, iemAImpl_sha256rnds2_u128, iemAImpl_sha256rnds2_u128_fallback),
                                 puDst, puSrc, puXmm0);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1562
1563
1564/* Opcode 0x66 0x0f 0x38 0xcb - invalid. */
1565
1566
/** Opcode 0x0f 0x38 0xcc - sha256msg1 Vdq, Wdq (SHA extensions, common worker dispatch). */
FNIEMOP_DEF(iemOp_sha256msg1_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, SHA256MSG1, sha256msg1, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSha_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSha, iemAImpl_sha256msg1_u128, iemAImpl_sha256msg1_u128_fallback));
}


/* Opcode 0x66 0x0f 0x38 0xcc - invalid. */


/** Opcode 0x0f 0x38 0xcd - sha256msg2 Vdq, Wdq (SHA extensions, common worker dispatch). */
FNIEMOP_DEF(iemOp_sha256msg2_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, SHA256MSG2, sha256msg2, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSha_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSha, iemAImpl_sha256msg2_u128, iemAImpl_sha256msg2_u128_fallback));
}
1586
1587
1588/* Opcode 0x66 0x0f 0x38 0xcd - invalid. */
1589/* Opcode 0x0f 0x38 0xce - invalid. */
1590/* Opcode 0x66 0x0f 0x38 0xce - invalid. */
1591/* Opcode 0x0f 0x38 0xcf - invalid. */
1592/* Opcode 0x66 0x0f 0x38 0xcf - invalid. */
1593
1594/* Opcode 0x66 0x0f 0x38 0xd0 - invalid. */
1595/* Opcode 0x66 0x0f 0x38 0xd1 - invalid. */
1596/* Opcode 0x66 0x0f 0x38 0xd2 - invalid. */
1597/* Opcode 0x66 0x0f 0x38 0xd3 - invalid. */
1598/* Opcode 0x66 0x0f 0x38 0xd4 - invalid. */
1599/* Opcode 0x66 0x0f 0x38 0xd5 - invalid. */
1600/* Opcode 0x66 0x0f 0x38 0xd6 - invalid. */
1601/* Opcode 0x66 0x0f 0x38 0xd7 - invalid. */
1602/* Opcode 0x66 0x0f 0x38 0xd8 - invalid. */
1603/* Opcode 0x66 0x0f 0x38 0xd9 - invalid. */
1604/* Opcode 0x66 0x0f 0x38 0xda - invalid. */
1605
1606
/**
 * Opcode 0x66 0x0f 0x38 0xdb - aesimc Vdq, Wdq (AES-NI).
 *
 * This and the four wrappers below (0xdb..0xdf) are the AES-NI group: all
 * dispatch through the common AES-NI full128/full128 worker with
 * host-or-fallback implementation selection.
 */
FNIEMOP_DEF(iemOp_aesimc_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, AESIMC, aesimc, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesimc_u128, iemAImpl_aesimc_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0xdc - aesenc Vdq, Wdq (AES-NI). */
FNIEMOP_DEF(iemOp_aesenc_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, AESENC, aesenc, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesenc_u128, iemAImpl_aesenc_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0xdd - aesenclast Vdq, Wdq (AES-NI). */
FNIEMOP_DEF(iemOp_aesenclast_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, AESENCLAST, aesenclast, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesenclast_u128, iemAImpl_aesenclast_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0xde - aesdec Vdq, Wdq (AES-NI). */
FNIEMOP_DEF(iemOp_aesdec_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, AESDEC, aesdec, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesdec_u128, iemAImpl_aesdec_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0xdf - aesdeclast Vdq, Wdq (AES-NI). */
FNIEMOP_DEF(iemOp_aesdeclast_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, AESDECLAST, aesdeclast, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesdeclast_u128, iemAImpl_aesdeclast_u128_fallback));
}
1650
1651
1652/* Opcode 0x66 0x0f 0x38 0xe0 - invalid. */
1653/* Opcode 0x66 0x0f 0x38 0xe1 - invalid. */
1654/* Opcode 0x66 0x0f 0x38 0xe2 - invalid. */
1655/* Opcode 0x66 0x0f 0x38 0xe3 - invalid. */
1656/* Opcode 0x66 0x0f 0x38 0xe4 - invalid. */
1657/* Opcode 0x66 0x0f 0x38 0xe5 - invalid. */
1658/* Opcode 0x66 0x0f 0x38 0xe6 - invalid. */
1659/* Opcode 0x66 0x0f 0x38 0xe7 - invalid. */
1660/* Opcode 0x66 0x0f 0x38 0xe8 - invalid. */
1661/* Opcode 0x66 0x0f 0x38 0xe9 - invalid. */
1662/* Opcode 0x66 0x0f 0x38 0xea - invalid. */
1663/* Opcode 0x66 0x0f 0x38 0xeb - invalid. */
1664/* Opcode 0x66 0x0f 0x38 0xec - invalid. */
1665/* Opcode 0x66 0x0f 0x38 0xed - invalid. */
1666/* Opcode 0x66 0x0f 0x38 0xee - invalid. */
1667/* Opcode 0x66 0x0f 0x38 0xef - invalid. */
1668
1669
/**
 * Opcode [0x66] 0x0f 0x38 0xf0 - movbe Gv, Mv.
 *
 * Load a 16/32/64-bit value from memory, byte-swap it, and store it in the
 * ModRM register operand.  Raises #UD when the CPU profile lacks MOVBE or
 * when a register source is encoded (memory source only).
 */
FNIEMOP_DEF(iemOp_movbe_Gv_Mv)
{
    IEMOP_MNEMONIC2(RM, MOVBE, movbe, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovBe)
        return iemOp_InvalidNeedRM(pVCpu); /* MOVBE not exposed to the guest. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, memory: fetch, byte-swap locally, store to GREG.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_BSWAP_LOCAL_U16(uSrc);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_BSWAP_LOCAL_U32(uSrc);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_BSWAP_LOCAL_U64(uSrc);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Reg/reg not supported. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1742
1743
1744/* Opcode 0xf3 0x0f 0x38 0xf0 - invalid. */
1745
1746
/**
 * Opcode 0xf2 0x0f 0x38 0xf0 - crc32 Gd, Eb (SSE4.2).
 *
 * Accumulate an 8-bit source into the 32-bit CRC32 value in the destination
 * register (host-or-fallback implementation selection).  In 64-bit mode the
 * upper half of the destination GPR is explicitly cleared
 * (IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF).  #UD when SSE4.2 is not exposed.
 */
FNIEMOP_DEF(iemOp_crc32_Gd_Eb)
{
    IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Eb, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
        return iemOp_InvalidNeedRM(pVCpu);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint32_t *,          puDst, 0);
        IEM_MC_ARG(uint8_t,             uSrc,  1);
        IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
        IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint32_t *,          puDst, 0);
        IEM_MC_ARG(uint8_t,             uSrc,  1);
        IEM_MC_LOCAL(RTGCPTR,           GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
        IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1793
1794
/**
 * Opcode [0x66] 0x0f 0x38 0xf1 - MOVBE Mv, Gv.
 *
 * Stores a general purpose register to memory with the byte order reversed
 * (16-, 32- or 64-bit, per the effective operand size).  Only the memory
 * destination form is valid: a register/register encoding raises \#UD, as
 * does the instruction itself when the guest CPU profile doesn't advertise
 * MOVBE.
 */
FNIEMOP_DEF(iemOp_movbe_Mv_Gv)
{
    IEMOP_MNEMONIC2(MR, MOVBE, movbe, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovBe)
        return iemOp_InvalidNeedRM(pVCpu); /* #UD, but still consume the ModR/M byte. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Memory, register.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_BSWAP_LOCAL_U16(u16Value); /* Reverse the byte order before storing. */
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_BSWAP_LOCAL_U32(u32Value);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_BSWAP_LOCAL_U64(u64Value);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Reg/reg not supported. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1858
1859
1860/* Opcode 0xf3 0x0f 0x38 0xf1 - invalid. */
1861
1862
/**
 * Opcode 0xf2 0x0f 0x38 0xf1 - CRC32 Gd, Ev.
 *
 * Accumulates a 16-, 32- or 64-bit source operand (register or memory, per
 * the effective operand size) into the CRC32 value in the destination.  The
 * destination is always a 32-bit register (Gd) regardless of operand size;
 * bits 63:32 of a 64-bit destination are cleared.  Decodes as an invalid
 * opcode (\#UD) when the guest CPU profile doesn't advertise SSE4.2.
 */
FNIEMOP_DEF(iemOp_crc32_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
        return iemOp_InvalidNeedRM(pVCpu); /* #UD, but still consume the ModR/M byte. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint16_t, uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst); /* 32-bit GPR write zeros bits 63:32. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint32_t, uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint64_t, uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint16_t, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* cbImm=0: no immediate follows the ModR/M. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint32_t, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint64_t, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1989
1990
1991/* Opcode 0x0f 0x38 0xf2 - invalid (vex only). */
1992/* Opcode 0x66 0x0f 0x38 0xf2 - invalid. */
1993/* Opcode 0xf3 0x0f 0x38 0xf2 - invalid. */
1994/* Opcode 0xf2 0x0f 0x38 0xf2 - invalid. */
1995
1996/* Opcode 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1997/* Opcode 0x66 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1998/* Opcode 0xf3 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1999/* Opcode 0xf2 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
2000
2001/* Opcode 0x0f 0x38 0xf4 - invalid. */
2002/* Opcode 0x66 0x0f 0x38 0xf4 - invalid. */
2003/* Opcode 0xf3 0x0f 0x38 0xf4 - invalid. */
2004/* Opcode 0xf2 0x0f 0x38 0xf4 - invalid. */
2005
2006/* Opcode 0x0f 0x38 0xf5 - invalid (vex only). */
2007/* Opcode 0x66 0x0f 0x38 0xf5 - invalid. */
2008/* Opcode 0xf3 0x0f 0x38 0xf5 - invalid (vex only). */
2009/* Opcode 0xf2 0x0f 0x38 0xf5 - invalid (vex only). */
2010
2011/* Opcode 0x0f 0x38 0xf6 - invalid. */
2012
/**
 * Common body emitter for the ADCX and ADOX instructions
 * (0x66/0xf3 0x0f 0x38 0xf6).
 *
 * Both variants share the same encoding and decode sequence and differ only
 * in which worker (iemAImpl_adcx_* vs iemAImpl_adox_*) is invoked, so the
 * whole decode + dispatch body is shared through this macro.  The operand
 * size is 64-bit when REX.W is set and 32-bit otherwise; there is no 16-bit
 * form.  Decodes as an invalid opcode (\#UD) when the guest CPU profile
 * doesn't advertise ADX.
 *
 * Note: ADCX/ADOX take no immediate operand, so IEM_MC_CALC_RM_EFF_ADDR is
 * given cbImm = 0.  Passing a non-zero value here would make RIP-relative
 * memory operands resolve one byte too high in 64-bit mode (the displacement
 * is relative to the end of the instruction, immediate bytes included).
 *
 * @param   a_Variant   The worker name infix: adcx or adox.
 */
#define ADX_EMIT(a_Variant) \
    do \
    { \
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAdx) \
            return iemOp_InvalidNeedRM(pVCpu); \
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            /* 64-bit operand size (REX.W). */ \
            if (IEM_IS_MODRM_REG_MODE(bRm)) \
            { \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAdx, iemAImpl_## a_Variant ##_u64, iemAImpl_## a_Variant ##_u64_fallback), \
                                         pu64Dst, pEFlags, u64Src); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
            } \
            else \
            { \
                IEM_MC_BEGIN(3, 1); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* cbImm=0: no immediate operand. */ \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAdx, iemAImpl_## a_Variant ##_u64, iemAImpl_## a_Variant ##_u64_fallback), \
                                         pu64Dst, pEFlags, u64Src); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
            } \
        } \
        else \
        { \
            /* 32-bit operand size. */ \
            if (IEM_IS_MODRM_REG_MODE(bRm)) \
            { \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAdx, iemAImpl_## a_Variant ##_u32, iemAImpl_## a_Variant ##_u32_fallback), \
                                         pu32Dst, pEFlags, u32Src); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
            } \
            else \
            { \
                IEM_MC_BEGIN(3, 1); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); /* cbImm=0: no immediate operand. */ \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAdx, iemAImpl_## a_Variant ##_u32, iemAImpl_## a_Variant ##_u32_fallback), \
                                         pu32Dst, pEFlags, u32Src); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
            } \
        } \
    } while(0)
2090
/**
 * Opcode 0x66 0x0f 0x38 0xf6 - ADCX Gy, Ey.
 *
 * Decode and dispatch are shared with ADOX via ADX_EMIT; only the worker
 * (iemAImpl_adcx_u32/u64, the CF-based add-with-carry per the Intel SDM)
 * differs.
 */
FNIEMOP_DEF(iemOp_adcx_Gy_Ey)
{
    IEMOP_MNEMONIC2(RM, ADCX, adcx, Gy, Ey, DISOPTYPE_HARMLESS, 0);
    ADX_EMIT(adcx);
}
2097
2098
/**
 * Opcode 0xf3 0x0f 0x38 0xf6 - ADOX Gy, Ey.
 *
 * Decode and dispatch are shared with ADCX via ADX_EMIT; only the worker
 * (iemAImpl_adox_u32/u64, the OF-based add-with-carry per the Intel SDM)
 * differs.
 */
FNIEMOP_DEF(iemOp_adox_Gy_Ey)
{
    IEMOP_MNEMONIC2(RM, ADOX, adox, Gy, Ey, DISOPTYPE_HARMLESS, 0);
    ADX_EMIT(adox);
}
2105
2106
2107/* Opcode 0xf2 0x0f 0x38 0xf6 - invalid (vex only). */
2108
2109/* Opcode 0x0f 0x38 0xf7 - invalid (vex only). */
2110/* Opcode 0x66 0x0f 0x38 0xf7 - invalid (vex only). */
2111/* Opcode 0xf3 0x0f 0x38 0xf7 - invalid (vex only). */
2112/* Opcode 0xf2 0x0f 0x38 0xf7 - invalid (vex only). */
2113
2114/* Opcode 0x0f 0x38 0xf8 - invalid. */
2115/* Opcode 0x66 0x0f 0x38 0xf8 - invalid. */
2116/* Opcode 0xf3 0x0f 0x38 0xf8 - invalid. */
2117/* Opcode 0xf2 0x0f 0x38 0xf8 - invalid. */
2118
2119/* Opcode 0x0f 0x38 0xf9 - invalid. */
2120/* Opcode 0x66 0x0f 0x38 0xf9 - invalid. */
2121/* Opcode 0xf3 0x0f 0x38 0xf9 - invalid. */
2122/* Opcode 0xf2 0x0f 0x38 0xf9 - invalid. */
2123
2124/* Opcode 0x0f 0x38 0xfa - invalid. */
2125/* Opcode 0x66 0x0f 0x38 0xfa - invalid. */
2126/* Opcode 0xf3 0x0f 0x38 0xfa - invalid. */
2127/* Opcode 0xf2 0x0f 0x38 0xfa - invalid. */
2128
2129/* Opcode 0x0f 0x38 0xfb - invalid. */
2130/* Opcode 0x66 0x0f 0x38 0xfb - invalid. */
2131/* Opcode 0xf3 0x0f 0x38 0xfb - invalid. */
2132/* Opcode 0xf2 0x0f 0x38 0xfb - invalid. */
2133
2134/* Opcode 0x0f 0x38 0xfc - invalid. */
2135/* Opcode 0x66 0x0f 0x38 0xfc - invalid. */
2136/* Opcode 0xf3 0x0f 0x38 0xfc - invalid. */
2137/* Opcode 0xf2 0x0f 0x38 0xfc - invalid. */
2138
2139/* Opcode 0x0f 0x38 0xfd - invalid. */
2140/* Opcode 0x66 0x0f 0x38 0xfd - invalid. */
2141/* Opcode 0xf3 0x0f 0x38 0xfd - invalid. */
2142/* Opcode 0xf2 0x0f 0x38 0xfd - invalid. */
2143
2144/* Opcode 0x0f 0x38 0xfe - invalid. */
2145/* Opcode 0x66 0x0f 0x38 0xfe - invalid. */
2146/* Opcode 0xf3 0x0f 0x38 0xfe - invalid. */
2147/* Opcode 0xf2 0x0f 0x38 0xfe - invalid. */
2148
2149/* Opcode 0x0f 0x38 0xff - invalid. */
2150/* Opcode 0x66 0x0f 0x38 0xff - invalid. */
2151/* Opcode 0xf3 0x0f 0x38 0xff - invalid. */
2152/* Opcode 0xf2 0x0f 0x38 0xff - invalid. */
2153
2154
/**
 * Three byte opcode map, first two bytes are 0x0f 0x38.
 *
 * Four entries per opcode byte, selected by the mandatory prefix in effect:
 * no prefix, 0x66, 0xf3 and 0xf2 (IEMOP_X4 emits the same handler for all
 * four columns).  Invalid encodings go to iemOp_InvalidNeedRM, i.e. invalid
 * opcode handling with a ModR/M byte still to be decoded.
 *
 * @sa g_apfnVexMap2
 */
IEM_STATIC const PFNIEMOP g_apfnThreeByte0f38[] =
{
    /* no prefix, 066h prefix f3h prefix, f2h prefix */
    /* 0x00 */ iemOp_pshufb_Pq_Qq, iemOp_pshufb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x01 */ iemOp_phaddw_Pq_Qq, iemOp_phaddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x02 */ iemOp_phaddd_Pq_Qq, iemOp_phaddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x03 */ iemOp_phaddsw_Pq_Qq, iemOp_phaddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x04 */ iemOp_pmaddubsw_Pq_Qq, iemOp_pmaddubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x05 */ iemOp_phsubw_Pq_Qq, iemOp_phsubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x06 */ iemOp_phsubd_Pq_Qq, iemOp_phsubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x07 */ iemOp_phsubsw_Pq_Qq, iemOp_phsubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x08 */ iemOp_psignb_Pq_Qq, iemOp_psignb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x09 */ iemOp_psignw_Pq_Qq, iemOp_psignw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0a */ iemOp_psignd_Pq_Qq, iemOp_psignd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0b */ iemOp_pmulhrsw_Pq_Qq, iemOp_pmulhrsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */ iemOp_InvalidNeedRM, iemOp_pblendvb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x14 */ iemOp_InvalidNeedRM, iemOp_blendvps_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_InvalidNeedRM, iemOp_blendvpd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x17 */ iemOp_InvalidNeedRM, iemOp_ptest_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */ iemOp_pabsb_Pq_Qq, iemOp_pabsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x1d */ iemOp_pabsw_Pq_Qq, iemOp_pabsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x1e */ iemOp_pabsd_Pq_Qq, iemOp_pabsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */ iemOp_InvalidNeedRM, iemOp_pmovsxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x21 */ iemOp_InvalidNeedRM, iemOp_pmovsxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x22 */ iemOp_InvalidNeedRM, iemOp_pmovsxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x23 */ iemOp_InvalidNeedRM, iemOp_pmovsxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x24 */ iemOp_InvalidNeedRM, iemOp_pmovsxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x25 */ iemOp_InvalidNeedRM, iemOp_pmovsxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_InvalidNeedRM, iemOp_pmuldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_InvalidNeedRM, iemOp_pcmpeqq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM, iemOp_movntdqa_Vdq_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2b */ iemOp_InvalidNeedRM, iemOp_packusdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x30 */ iemOp_InvalidNeedRM, iemOp_pmovzxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x31 */ iemOp_InvalidNeedRM, iemOp_pmovzxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x32 */ iemOp_InvalidNeedRM, iemOp_pmovzxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x33 */ iemOp_InvalidNeedRM, iemOp_pmovzxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x34 */ iemOp_InvalidNeedRM, iemOp_pmovzxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x35 */ iemOp_InvalidNeedRM, iemOp_pmovzxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */ iemOp_InvalidNeedRM, iemOp_pcmpgtq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x38 */ iemOp_InvalidNeedRM, iemOp_pminsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x39 */ iemOp_InvalidNeedRM, iemOp_pminsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3a */ iemOp_InvalidNeedRM, iemOp_pminuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3b */ iemOp_InvalidNeedRM, iemOp_pminud_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3c */ iemOp_InvalidNeedRM, iemOp_pmaxsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3d */ iemOp_InvalidNeedRM, iemOp_pmaxsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3e */ iemOp_InvalidNeedRM, iemOp_pmaxuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3f */ iemOp_InvalidNeedRM, iemOp_pmaxud_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x40 */ iemOp_InvalidNeedRM, iemOp_pmulld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x41 */ iemOp_InvalidNeedRM, iemOp_phminposuw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x60 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x61 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x62 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x63 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x68 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x69 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x80 */ iemOp_InvalidNeedRM, iemOp_invept_Gy_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x81 */ iemOp_InvalidNeedRM, iemOp_invvpid_Gy_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x82 */ iemOp_InvalidNeedRM, iemOp_invpcid_Gy_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ iemOp_sha1nexte_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc9 */ iemOp_sha1msg1_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xca */ iemOp_sha1msg2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xcb */ iemOp_sha256rnds2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xcc */ iemOp_sha256msg1_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xcd */ iemOp_sha256msg2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_aesimc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_aesenc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_aesenclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_aesdec_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_aesdeclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xf0 */ iemOp_movbe_Gv_Mv, iemOp_movbe_Gv_Mv, iemOp_InvalidNeedRM, iemOp_crc32_Gd_Eb,
    /* 0xf1 */ iemOp_movbe_Mv_Gv, iemOp_movbe_Mv_Gv, iemOp_InvalidNeedRM, iemOp_crc32_Gv_Ev,
    /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_adcx_Gy_Ey, iemOp_adox_Gy_Ey, iemOp_InvalidNeedRM,
    /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRM),
};
AssertCompile(RT_ELEMENTS(g_apfnThreeByte0f38) == 1024); /* 256 opcodes x 4 prefix columns. */
2435
2436/** @} */
2437
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette