VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap2.cpp.h@98150

Last change on this file since 98150 was 98103, checked in by vboxsync, 2 years ago

Copyright year updates by scm.

/* $Id: IEMAllInstructionsVexMap2.cpp.h 98103 2023-01-17 14:15:46Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsThree0f38.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 2
 * @{
 */
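
/* All instructions in this map are reached via a VEX prefix selecting the
   0x0f 0x38 escape (VEX.mmmmm = 2); the legacy decoding of the same map
   lives in IEMAllInstructionsThree0f38.cpp.h. */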

/* Opcode VEX.0F38 0x00 - invalid. */


/** Opcode VEX.66.0F38 0x00. */
FNIEMOP_DEF(iemOp_vpshufb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSHUFB, vpshufb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpshufb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
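
/* Note: IEM_SELECT_HOST_OR_FALLBACK picks the assembly helper when the host
   CPU supports the named feature (fAvx2 above) and the C fallback otherwise;
   the iemOpCommonAvxAvx2_* workers provide the shared operand decoding. Most
   instructions in this map follow this pattern. */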


/* Opcode VEX.0F38 0x01 - invalid. */


/** Opcode VEX.66.0F38 0x01. */
FNIEMOP_DEF(iemOp_vphaddw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDW, vphaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x02 - invalid. */


/** Opcode VEX.66.0F38 0x02. */
FNIEMOP_DEF(iemOp_vphaddd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDD, vphaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x03 - invalid. */


/** Opcode VEX.66.0F38 0x03. */
FNIEMOP_DEF(iemOp_vphaddsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDSW, vphaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x04 - invalid. */


/** Opcode VEX.66.0F38 0x04. */
FNIEMOP_DEF(iemOp_vpmaddubsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMADDUBSW, vpmaddubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmaddubsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x05 - invalid. */


/** Opcode VEX.66.0F38 0x05. */
FNIEMOP_DEF(iemOp_vphsubw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBW, vphsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x06 - invalid. */


/** Opcode VEX.66.0F38 0x06. */
FNIEMOP_DEF(iemOp_vphsubd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBD, vphsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x07 - invalid. */


/** Opcode VEX.66.0F38 0x07. */
FNIEMOP_DEF(iemOp_vphsubsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBSW, vphsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x08 - invalid. */


/** Opcode VEX.66.0F38 0x08. */
FNIEMOP_DEF(iemOp_vpsignb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGNB, vpsignb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x09 - invalid. */


/** Opcode VEX.66.0F38 0x09. */
FNIEMOP_DEF(iemOp_vpsignw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGNW, vpsignw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0a - invalid. */


/** Opcode VEX.66.0F38 0x0a. */
FNIEMOP_DEF(iemOp_vpsignd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGND, vpsignd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0b - invalid. */


/** Opcode VEX.66.0F38 0x0b. */
FNIEMOP_DEF(iemOp_vpmulhrsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULHRSW, vpmulhrsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmulhrsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0c - invalid. */
/** Opcode VEX.66.0F38 0x0c. */
FNIEMOP_STUB(iemOp_vpermilps_Vx_Hx_Wx);
/* Opcode VEX.0F38 0x0d - invalid. */
/** Opcode VEX.66.0F38 0x0d. */
FNIEMOP_STUB(iemOp_vpermilpd_Vx_Hx_Wx);
/* Opcode VEX.0F38 0x0e - invalid. */
/** Opcode VEX.66.0F38 0x0e. */
FNIEMOP_STUB(iemOp_vtestps_Vx_Wx);
/* Opcode VEX.0F38 0x0f - invalid. */
/** Opcode VEX.66.0F38 0x0f. */
FNIEMOP_STUB(iemOp_vtestpd_Vx_Wx);


/* Opcode VEX.0F38 0x10 - invalid */
/* Opcode VEX.66.0F38 0x10 - invalid (legacy only). */
/* Opcode VEX.0F38 0x11 - invalid */
/* Opcode VEX.66.0F38 0x11 - invalid */
/* Opcode VEX.0F38 0x12 - invalid */
/* Opcode VEX.66.0F38 0x12 - invalid */
/* Opcode VEX.0F38 0x13 - invalid */
/* Opcode VEX.66.0F38 0x13 - invalid (vex only). */
/* Opcode VEX.0F38 0x14 - invalid */
/* Opcode VEX.66.0F38 0x14 - invalid (legacy only). */
/* Opcode VEX.0F38 0x15 - invalid */
/* Opcode VEX.66.0F38 0x15 - invalid (legacy only). */
/* Opcode VEX.0F38 0x16 - invalid */
/** Opcode VEX.66.0F38 0x16. */
FNIEMOP_STUB(iemOp_vpermps_Vqq_Hqq_Wqq);
/* Opcode VEX.0F38 0x17 - invalid */


/** Opcode VEX.66.0F38 0x17. */
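/* Note: (v)ptest only writes EFLAGS: ZF is set when the AND of the two source
   operands is all zeroes, CF when the AND-NOT is; neither register operand is
   modified. */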
FNIEMOP_DEF(iemOp_vptest_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback),
                                     puSrc1, puSrc2, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(3, 3);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback),
                                     puSrc1, puSrc2, pEFlags);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/* Opcode VEX.0F38 0x18 - invalid */
/** Opcode VEX.66.0F38 0x18. */
FNIEMOP_STUB(iemOp_vbroadcastss_Vx_Wd);
/* Opcode VEX.0F38 0x19 - invalid */
/** Opcode VEX.66.0F38 0x19. */
FNIEMOP_STUB(iemOp_vbroadcastsd_Vqq_Wq);
/* Opcode VEX.0F38 0x1a - invalid */
/** Opcode VEX.66.0F38 0x1a. */
FNIEMOP_STUB(iemOp_vbroadcastf128_Vqq_Mdq);
/* Opcode VEX.0F38 0x1b - invalid */
/* Opcode VEX.66.0F38 0x1b - invalid */
/* Opcode VEX.0F38 0x1c - invalid. */


/** Opcode VEX.66.0F38 0x1c. */
FNIEMOP_DEF(iemOp_vpabsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSB, vpabsb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x1d - invalid. */


/** Opcode VEX.66.0F38 0x1d. */
FNIEMOP_DEF(iemOp_vpabsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSW, vpabsw, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}

/* Opcode VEX.0F38 0x1e - invalid. */


/** Opcode VEX.66.0F38 0x1e. */
FNIEMOP_DEF(iemOp_vpabsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSD, vpabsd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x1f - invalid */
/* Opcode VEX.66.0F38 0x1f - invalid */


/** Body for the vpmov{s,z}x* instructions. */
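/* a_Instr is the lower-case mnemonic used to form the iemAImpl_<instr>_u128/_u256
   worker names; a_SrcWidth is the width in bits of the memory source operand of
   the 128-bit form (e.g. 64 for vpmovsxbw, 16 for vpmovsxbq). */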
#define IEMOP_BODY_VPMOV_S_Z(a_Instr, a_SrcWidth) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); \
            IEM_MC_BEGIN(2, 1); \
            IEM_MC_LOCAL(RTUINT256U, uDst); \
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1); \
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puDst, puSrc); \
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); \
            IEM_MC_BEGIN(2, 0); \
            IEM_MC_ARG(PRTUINT128U, puDst, 0); \
            IEM_MC_ARG(uint64_t, uSrc, 1); \
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword */); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), \
                                     puDst, uSrc); \
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(2, 3); \
            IEM_MC_LOCAL(RTUINT256U, uDst); \
            IEM_MC_LOCAL(RTUINT128U, uSrc); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); \
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puDst, puSrc); \
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(2, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG(PRTUINT128U, puDst, 0); \
            IEM_MC_ARG(uint ## a_SrcWidth ##_t, uSrc, 1); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); \
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_MEM_U ## a_SrcWidth (uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), \
                                     puDst, uSrc); \
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0

/** Opcode VEX.66.0F38 0x20. */
FNIEMOP_DEF(iemOp_vpmovsxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBW, vpmovsxbw, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbw, 64);
}


/** Opcode VEX.66.0F38 0x21. */
FNIEMOP_DEF(iemOp_vpmovsxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBD, vpmovsxbd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbd, 32);
}


/** Opcode VEX.66.0F38 0x22. */
FNIEMOP_DEF(iemOp_vpmovsxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBQ, vpmovsxbq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbq, 16);
}


/** Opcode VEX.66.0F38 0x23. */
FNIEMOP_DEF(iemOp_vpmovsxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXWD, vpmovsxwd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxwd, 64);
}


/** Opcode VEX.66.0F38 0x24. */
FNIEMOP_DEF(iemOp_vpmovsxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXWQ, vpmovsxwq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxwq, 32);
}


/** Opcode VEX.66.0F38 0x25. */
FNIEMOP_DEF(iemOp_vpmovsxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXDQ, vpmovsxdq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxdq, 64);
}


/* Opcode VEX.66.0F38 0x26 - invalid */
/* Opcode VEX.66.0F38 0x27 - invalid */


/** Opcode VEX.66.0F38 0x28. */
FNIEMOP_DEF(iemOp_vpmuldq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULDQ, vpmuldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmuldq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x29. */
FNIEMOP_DEF(iemOp_vpcmpeqq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQQ, vpcmpeqq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpcmpeqq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


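/** Opcode VEX.66.0F38 0x2a. VMOVNTDQA is a non-temporal (streaming) load hint;
 *  the emulation below treats it as an ordinary aligned load. */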
FNIEMOP_DEF(iemOp_vmovntdqa_Vx_Mx)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength == 0)
        {
            /**
             * @opcode      0x2a
             * @opcodesub   !11 mr/reg vex.l=0
             * @oppfx       0x66
             * @opcpuid     avx
             * @opgroup     og_avx_cachect
             * @opxcpttype  1
             * @optest      op1=-1 op2=2 -> op1=2
             * @optest      op1=0 op2=-42 -> op1=-42
             */
            /* 128-bit: Memory, register. */
            IEMOP_MNEMONIC2EX(vmovntdqa_Vdq_WO_Mdq_L0, "vmovntdqa, Vdq_WO, Mdq", VEX_RM_MEM, VMOVNTDQA, vmovntdqa, Vx_WO, Mx,
                              DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /**
             * @opdone
             * @opcode      0x2a
             * @opcodesub   !11 mr/reg vex.l=1
             * @oppfx       0x66
             * @opcpuid     avx2
             * @opgroup     og_avx2_cachect
             * @opxcpttype  1
             * @optest      op1=-1 op2=2 -> op1=2
             * @optest      op1=0 op2=-42 -> op1=-42
             */
            /* 256-bit: Memory, register. */
            IEMOP_MNEMONIC2EX(vmovntdqa_Vqq_WO_Mqq_L1, "vmovntdqa, Vqq_WO,Mqq", VEX_RM_MEM, VMOVNTDQA, vmovntdqa, Vx_WO, Mx,
                              DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f382arg
     * @opcode      0x2a
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode VEX.66.0F38 0x2b. */
FNIEMOP_DEF(iemOp_vpackusdw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPACKUSDW, vpackusdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpackusdw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x2c. */
FNIEMOP_STUB(iemOp_vmaskmovps_Vx_Hx_Mx);
/** Opcode VEX.66.0F38 0x2d. */
FNIEMOP_STUB(iemOp_vmaskmovpd_Vx_Hx_Mx);
/** Opcode VEX.66.0F38 0x2e. */
FNIEMOP_STUB(iemOp_vmaskmovps_Mx_Hx_Vx);
/** Opcode VEX.66.0F38 0x2f. */
FNIEMOP_STUB(iemOp_vmaskmovpd_Mx_Hx_Vx);


/** Opcode VEX.66.0F38 0x30. */
FNIEMOP_DEF(iemOp_vpmovzxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBW, vpmovzxbw, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbw, 64);
}


/** Opcode VEX.66.0F38 0x31. */
FNIEMOP_DEF(iemOp_vpmovzxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBD, vpmovzxbd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbd, 32);
}


/** Opcode VEX.66.0F38 0x32. */
FNIEMOP_DEF(iemOp_vpmovzxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBQ, vpmovzxbq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbq, 16);
}


/** Opcode VEX.66.0F38 0x33. */
FNIEMOP_DEF(iemOp_vpmovzxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXWD, vpmovzxwd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxwd, 64);
}


/** Opcode VEX.66.0F38 0x34. */
FNIEMOP_DEF(iemOp_vpmovzxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXWQ, vpmovzxwq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxwq, 32);
}


/** Opcode VEX.66.0F38 0x35. */
FNIEMOP_DEF(iemOp_vpmovzxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXDQ, vpmovzxdq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxdq, 64);
}


/** Opcode VEX.66.0F38 0x36. */
FNIEMOP_STUB(iemOp_vpermd_Vqq_Hqq_Wqq);


/** Opcode VEX.66.0F38 0x37. */
FNIEMOP_DEF(iemOp_vpcmpgtq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTQ, vpcmpgtq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpcmpgtq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x38. */
FNIEMOP_DEF(iemOp_vpminsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINSB, vpminsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpminsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x39. */
FNIEMOP_DEF(iemOp_vpminsd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINSD, vpminsd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpminsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3a. */
FNIEMOP_DEF(iemOp_vpminuw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINUW, vpminuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpminuw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3b. */
FNIEMOP_DEF(iemOp_vpminud_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINUD, vpminud, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpminud);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3c. */
FNIEMOP_DEF(iemOp_vpmaxsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSB, vpmaxsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpmaxsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3d. */
FNIEMOP_DEF(iemOp_vpmaxsd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSD, vpmaxsd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpmaxsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3e. */
FNIEMOP_DEF(iemOp_vpmaxuw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXUW, vpmaxuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpmaxuw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3f. */
FNIEMOP_DEF(iemOp_vpmaxud_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXUD, vpmaxud, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpmaxud);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x40. */
FNIEMOP_DEF(iemOp_vpmulld_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULLD, vpmulld, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmulld);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x41. */
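/* Note: vphminposuw scans the eight unsigned words of the 128-bit source for
   the smallest value, storing it in the destination's low word and its index
   in bits 18:16 while zeroing the remaining bits. */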
FNIEMOP_DEF(iemOp_vphminposuw_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(VEX_RM, VPHMINPOSUW, vphminposuw, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback),
                                 puDst, puSrc);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback),
                                 puDst, puSrc);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.66.0F38 0x42 - invalid. */
/* Opcode VEX.66.0F38 0x43 - invalid. */
/* Opcode VEX.66.0F38 0x44 - invalid. */
/** Opcode VEX.66.0F38 0x45. */
FNIEMOP_STUB(iemOp_vpsrlvd_q_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x46. */
FNIEMOP_STUB(iemOp_vsravd_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x47. */
FNIEMOP_STUB(iemOp_vpsllvd_q_Vx_Hx_Wx);
/* Opcode VEX.66.0F38 0x48 - invalid. */
/* Opcode VEX.66.0F38 0x49 - invalid. */
/* Opcode VEX.66.0F38 0x4a - invalid. */
/* Opcode VEX.66.0F38 0x4b - invalid. */
/* Opcode VEX.66.0F38 0x4c - invalid. */
/* Opcode VEX.66.0F38 0x4d - invalid. */
/* Opcode VEX.66.0F38 0x4e - invalid. */
/* Opcode VEX.66.0F38 0x4f - invalid. */

/* Opcode VEX.66.0F38 0x50 - invalid. */
/* Opcode VEX.66.0F38 0x51 - invalid. */
/* Opcode VEX.66.0F38 0x52 - invalid. */
/* Opcode VEX.66.0F38 0x53 - invalid. */
/* Opcode VEX.66.0F38 0x54 - invalid. */
/* Opcode VEX.66.0F38 0x55 - invalid. */
/* Opcode VEX.66.0F38 0x56 - invalid. */
/* Opcode VEX.66.0F38 0x57 - invalid. */
/** Opcode VEX.66.0F38 0x58. */
FNIEMOP_STUB(iemOp_vpbroadcastd_Vx_Wx);
/** Opcode VEX.66.0F38 0x59. */
FNIEMOP_STUB(iemOp_vpbroadcastq_Vx_Wx);
/** Opcode VEX.66.0F38 0x5a. */
FNIEMOP_STUB(iemOp_vbroadcasti128_Vqq_Mdq);
/* Opcode VEX.66.0F38 0x5b - invalid. */
/* Opcode VEX.66.0F38 0x5c - invalid. */
/* Opcode VEX.66.0F38 0x5d - invalid. */
/* Opcode VEX.66.0F38 0x5e - invalid. */
/* Opcode VEX.66.0F38 0x5f - invalid. */

/* Opcode VEX.66.0F38 0x60 - invalid. */
/* Opcode VEX.66.0F38 0x61 - invalid. */
/* Opcode VEX.66.0F38 0x62 - invalid. */
/* Opcode VEX.66.0F38 0x63 - invalid. */
/* Opcode VEX.66.0F38 0x64 - invalid. */
/* Opcode VEX.66.0F38 0x65 - invalid. */
/* Opcode VEX.66.0F38 0x66 - invalid. */
/* Opcode VEX.66.0F38 0x67 - invalid. */
/* Opcode VEX.66.0F38 0x68 - invalid. */
/* Opcode VEX.66.0F38 0x69 - invalid. */
/* Opcode VEX.66.0F38 0x6a - invalid. */
/* Opcode VEX.66.0F38 0x6b - invalid. */
/* Opcode VEX.66.0F38 0x6c - invalid. */
/* Opcode VEX.66.0F38 0x6d - invalid. */
/* Opcode VEX.66.0F38 0x6e - invalid. */
/* Opcode VEX.66.0F38 0x6f - invalid. */

/* Opcode VEX.66.0F38 0x70 - invalid. */
/* Opcode VEX.66.0F38 0x71 - invalid. */
/* Opcode VEX.66.0F38 0x72 - invalid. */
/* Opcode VEX.66.0F38 0x73 - invalid. */
/* Opcode VEX.66.0F38 0x74 - invalid. */
/* Opcode VEX.66.0F38 0x75 - invalid. */
/* Opcode VEX.66.0F38 0x76 - invalid. */
/* Opcode VEX.66.0F38 0x77 - invalid. */
/** Opcode VEX.66.0F38 0x78. */
FNIEMOP_STUB(iemOp_vpboardcastb_Vx_Wx);
/** Opcode VEX.66.0F38 0x79. */
FNIEMOP_STUB(iemOp_vpboardcastw_Vx_Wx);
/* Opcode VEX.66.0F38 0x7a - invalid. */
/* Opcode VEX.66.0F38 0x7b - invalid. */
/* Opcode VEX.66.0F38 0x7c - invalid. */
/* Opcode VEX.66.0F38 0x7d - invalid. */
/* Opcode VEX.66.0F38 0x7e - invalid. */
/* Opcode VEX.66.0F38 0x7f - invalid. */

/* Opcode VEX.66.0F38 0x80 - invalid (legacy only). */
/* Opcode VEX.66.0F38 0x81 - invalid (legacy only). */
/* Opcode VEX.66.0F38 0x82 - invalid (legacy only). */
/* Opcode VEX.66.0F38 0x83 - invalid. */
/* Opcode VEX.66.0F38 0x84 - invalid. */
/* Opcode VEX.66.0F38 0x85 - invalid. */
/* Opcode VEX.66.0F38 0x86 - invalid. */
/* Opcode VEX.66.0F38 0x87 - invalid. */
/* Opcode VEX.66.0F38 0x88 - invalid. */
/* Opcode VEX.66.0F38 0x89 - invalid. */
/* Opcode VEX.66.0F38 0x8a - invalid. */
/* Opcode VEX.66.0F38 0x8b - invalid. */
/** Opcode VEX.66.0F38 0x8c. */
FNIEMOP_STUB(iemOp_vpmaskmovd_q_Vx_Hx_Mx);
/* Opcode VEX.66.0F38 0x8d - invalid. */
/** Opcode VEX.66.0F38 0x8e. */
FNIEMOP_STUB(iemOp_vpmaskmovd_q_Mx_Vx_Hx);
/* Opcode VEX.66.0F38 0x8f - invalid. */

/** Opcode VEX.66.0F38 0x90 (vex only). */
FNIEMOP_STUB(iemOp_vgatherdd_q_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x91 (vex only). */
FNIEMOP_STUB(iemOp_vgatherqd_q_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x92 (vex only). */
FNIEMOP_STUB(iemOp_vgatherdps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x93 (vex only). */
FNIEMOP_STUB(iemOp_vgatherqps_d_Vx_Hx_Wx);
/* Opcode VEX.66.0F38 0x94 - invalid. */
/* Opcode VEX.66.0F38 0x95 - invalid. */
/** Opcode VEX.66.0F38 0x96 (vex only). */
FNIEMOP_STUB(iemOp_vfmaddsub132ps_q_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x97 (vex only). */
FNIEMOP_STUB(iemOp_vfmsubadd132ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x98 (vex only). */
FNIEMOP_STUB(iemOp_vfmadd132ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x99 (vex only). */
FNIEMOP_STUB(iemOp_vfmadd132ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x9a (vex only). */
FNIEMOP_STUB(iemOp_vfmsub132ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x9b (vex only). */
FNIEMOP_STUB(iemOp_vfmsub132ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x9c (vex only). */
FNIEMOP_STUB(iemOp_vfnmadd132ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x9d (vex only). */
FNIEMOP_STUB(iemOp_vfnmadd132ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x9e (vex only). */
FNIEMOP_STUB(iemOp_vfnmsub132ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x9f (vex only). */
FNIEMOP_STUB(iemOp_vfnmsub132ss_d_Vx_Hx_Wx);

/* Opcode VEX.66.0F38 0xa0 - invalid. */
/* Opcode VEX.66.0F38 0xa1 - invalid. */
/* Opcode VEX.66.0F38 0xa2 - invalid. */
/* Opcode VEX.66.0F38 0xa3 - invalid. */
/* Opcode VEX.66.0F38 0xa4 - invalid. */
/* Opcode VEX.66.0F38 0xa5 - invalid. */
/** Opcode VEX.66.0F38 0xa6 (vex only). */
FNIEMOP_STUB(iemOp_vfmaddsub213ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xa7 (vex only). */
FNIEMOP_STUB(iemOp_vfmsubadd213ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xa8 (vex only). */
FNIEMOP_STUB(iemOp_vfmadd213ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xa9 (vex only). */
FNIEMOP_STUB(iemOp_vfmadd213ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xaa (vex only). */
FNIEMOP_STUB(iemOp_vfmsub213ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xab (vex only). */
FNIEMOP_STUB(iemOp_vfmsub213ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xac (vex only). */
FNIEMOP_STUB(iemOp_vfnmadd213ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xad (vex only). */
FNIEMOP_STUB(iemOp_vfnmadd213ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xae (vex only). */
FNIEMOP_STUB(iemOp_vfnmsub213ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xaf (vex only). */
FNIEMOP_STUB(iemOp_vfnmsub213ss_d_Vx_Hx_Wx);

/* Opcode VEX.66.0F38 0xb0 - invalid. */
/* Opcode VEX.66.0F38 0xb1 - invalid. */
/* Opcode VEX.66.0F38 0xb2 - invalid. */
/* Opcode VEX.66.0F38 0xb3 - invalid. */
/* Opcode VEX.66.0F38 0xb4 - invalid. */
/* Opcode VEX.66.0F38 0xb5 - invalid. */
/** Opcode VEX.66.0F38 0xb6 (vex only). */
FNIEMOP_STUB(iemOp_vfmaddsub231ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xb7 (vex only). */
FNIEMOP_STUB(iemOp_vfmsubadd231ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xb8 (vex only). */
FNIEMOP_STUB(iemOp_vfmadd231ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xb9 (vex only). */
FNIEMOP_STUB(iemOp_vfmadd231ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xba (vex only). */
FNIEMOP_STUB(iemOp_vfmsub231ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xbb (vex only). */
FNIEMOP_STUB(iemOp_vfmsub231ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xbc (vex only). */
FNIEMOP_STUB(iemOp_vfnmadd231ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xbd (vex only). */
FNIEMOP_STUB(iemOp_vfnmadd231ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xbe (vex only). */
FNIEMOP_STUB(iemOp_vfnmsub231ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xbf (vex only). */
FNIEMOP_STUB(iemOp_vfnmsub231ss_d_Vx_Hx_Wx);

/* Opcode VEX.0F38 0xc0 - invalid. */
/* Opcode VEX.66.0F38 0xc0 - invalid. */
/* Opcode VEX.0F38 0xc1 - invalid. */
/* Opcode VEX.66.0F38 0xc1 - invalid. */
/* Opcode VEX.0F38 0xc2 - invalid. */
/* Opcode VEX.66.0F38 0xc2 - invalid. */
/* Opcode VEX.0F38 0xc3 - invalid. */
/* Opcode VEX.66.0F38 0xc3 - invalid. */
/* Opcode VEX.0F38 0xc4 - invalid. */
/* Opcode VEX.66.0F38 0xc4 - invalid. */
/* Opcode VEX.0F38 0xc5 - invalid. */
/* Opcode VEX.66.0F38 0xc5 - invalid. */
/* Opcode VEX.0F38 0xc6 - invalid. */
/* Opcode VEX.66.0F38 0xc6 - invalid. */
/* Opcode VEX.0F38 0xc7 - invalid. */
/* Opcode VEX.66.0F38 0xc7 - invalid. */
/** Opcode VEX.0F38 0xc8. */
FNIEMOP_STUB(iemOp_vsha1nexte_Vdq_Wdq);
/* Opcode VEX.66.0F38 0xc8 - invalid. */
/** Opcode VEX.0F38 0xc9. */
FNIEMOP_STUB(iemOp_vsha1msg1_Vdq_Wdq);
/* Opcode VEX.66.0F38 0xc9 - invalid. */
/** Opcode VEX.0F38 0xca. */
FNIEMOP_STUB(iemOp_vsha1msg2_Vdq_Wdq);
/* Opcode VEX.66.0F38 0xca - invalid. */
/** Opcode VEX.0F38 0xcb. */
FNIEMOP_STUB(iemOp_vsha256rnds2_Vdq_Wdq);
/* Opcode VEX.66.0F38 0xcb - invalid. */
/** Opcode VEX.0F38 0xcc. */
FNIEMOP_STUB(iemOp_vsha256msg1_Vdq_Wdq);
/* Opcode VEX.66.0F38 0xcc - invalid. */
/** Opcode VEX.0F38 0xcd. */
FNIEMOP_STUB(iemOp_vsha256msg2_Vdq_Wdq);
/* Opcode VEX.66.0F38 0xcd - invalid. */
/* Opcode VEX.0F38 0xce - invalid. */
/* Opcode VEX.66.0F38 0xce - invalid. */
/* Opcode VEX.0F38 0xcf - invalid. */
/* Opcode VEX.66.0F38 0xcf - invalid. */

/* Opcode VEX.66.0F38 0xd0 - invalid. */
/* Opcode VEX.66.0F38 0xd1 - invalid. */
/* Opcode VEX.66.0F38 0xd2 - invalid. */
/* Opcode VEX.66.0F38 0xd3 - invalid. */
/* Opcode VEX.66.0F38 0xd4 - invalid. */
/* Opcode VEX.66.0F38 0xd5 - invalid. */
/* Opcode VEX.66.0F38 0xd6 - invalid. */
/* Opcode VEX.66.0F38 0xd7 - invalid. */
/* Opcode VEX.66.0F38 0xd8 - invalid. */
/* Opcode VEX.66.0F38 0xd9 - invalid. */
/* Opcode VEX.66.0F38 0xda - invalid. */
/** Opcode VEX.66.0F38 0xdb. */
FNIEMOP_STUB(iemOp_vaesimc_Vdq_Wdq);
/** Opcode VEX.66.0F38 0xdc. */
FNIEMOP_STUB(iemOp_vaesenc_Vdq_Wdq);
/** Opcode VEX.66.0F38 0xdd. */
FNIEMOP_STUB(iemOp_vaesenclast_Vdq_Wdq);
/** Opcode VEX.66.0F38 0xde. */
FNIEMOP_STUB(iemOp_vaesdec_Vdq_Wdq);
/** Opcode VEX.66.0F38 0xdf. */
FNIEMOP_STUB(iemOp_vaesdeclast_Vdq_Wdq);

/* Opcode VEX.66.0F38 0xe0 - invalid. */
/* Opcode VEX.66.0F38 0xe1 - invalid. */
/* Opcode VEX.66.0F38 0xe2 - invalid. */
/* Opcode VEX.66.0F38 0xe3 - invalid. */
/* Opcode VEX.66.0F38 0xe4 - invalid. */
/* Opcode VEX.66.0F38 0xe5 - invalid. */
/* Opcode VEX.66.0F38 0xe6 - invalid. */
/* Opcode VEX.66.0F38 0xe7 - invalid. */
/* Opcode VEX.66.0F38 0xe8 - invalid. */
/* Opcode VEX.66.0F38 0xe9 - invalid. */
/* Opcode VEX.66.0F38 0xea - invalid. */
/* Opcode VEX.66.0F38 0xeb - invalid. */
/* Opcode VEX.66.0F38 0xec - invalid. */
/* Opcode VEX.66.0F38 0xed - invalid. */
/* Opcode VEX.66.0F38 0xee - invalid. */
/* Opcode VEX.66.0F38 0xef - invalid. */


/* Opcode VEX.0F38 0xf0 - invalid (legacy only). */
/* Opcode VEX.66.0F38 0xf0 - invalid (legacy only). */
/* Opcode VEX.F3.0F38 0xf0 - invalid. */
/* Opcode VEX.F2.0F38 0xf0 - invalid (legacy only). */

/* Opcode VEX.0F38 0xf1 - invalid (legacy only). */
/* Opcode VEX.66.0F38 0xf1 - invalid (legacy only). */
/* Opcode VEX.F3.0F38 0xf1 - invalid. */
/* Opcode VEX.F2.0F38 0xf1 - invalid (legacy only). */

/** Opcode VEX.0F38 0xf2 - ANDN (vex only). */
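/* Note: BMI1 ANDN computes pDst = ~uSrc1 & uSrc2; SF and ZF reflect the result,
   OF and CF are cleared, and AF/PF are architecturally undefined (hence the
   IEMOP_VERIFICATION_UNDEFINED_EFLAGS below). */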
FNIEMOP_DEF(iemOp_andn_Gy_By_Ey)
{
    IEMOP_MNEMONIC3(VEX_RVM, ANDN, andn, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
        return iemOp_InvalidNeedRM(pVCpu);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_L0();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_BEGIN(4, 0);
            IEM_MC_ARG(uint64_t *, pDst, 0);
            IEM_MC_ARG(uint64_t, uSrc1, 1);
            IEM_MC_ARG(uint64_t, uSrc2, 2);
            IEM_MC_ARG(uint32_t *, pEFlags, 3);
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u64, iemAImpl_andn_u64_fallback),
                                     pDst, uSrc1, uSrc2, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 0);
            IEM_MC_ARG(uint32_t *, pDst, 0);
            IEM_MC_ARG(uint32_t, uSrc1, 1);
            IEM_MC_ARG(uint32_t, uSrc2, 2);
            IEM_MC_ARG(uint32_t *, pEFlags, 3);
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u32, iemAImpl_andn_u32_fallback),
                                     pDst, uSrc1, uSrc2, pEFlags);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_BEGIN(4, 1);
            IEM_MC_ARG(uint64_t *, pDst, 0);
            IEM_MC_ARG(uint64_t, uSrc1, 1);
            IEM_MC_ARG(uint64_t, uSrc2, 2);
            IEM_MC_ARG(uint32_t *, pEFlags, 3);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0();
            IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u64, iemAImpl_andn_u64_fallback),
                                     pDst, uSrc1, uSrc2, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 1);
            IEM_MC_ARG(uint32_t *, pDst, 0);
            IEM_MC_ARG(uint32_t, uSrc1, 1);
            IEM_MC_ARG(uint32_t, uSrc2, 2);
            IEM_MC_ARG(uint32_t *, pEFlags, 3);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0();
            IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u32, iemAImpl_andn_u32_fallback),
                                     pDst, uSrc1, uSrc2, pEFlags);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}

/* Opcode VEX.66.0F38 0xf2 - invalid. */
/* Opcode VEX.F3.0F38 0xf2 - invalid. */
/* Opcode VEX.F2.0F38 0xf2 - invalid. */


/* Opcode VEX.0F38 0xf3 - invalid. */
/* Opcode VEX.66.0F38 0xf3 - invalid. */

/* Opcode VEX.F3.0F38 0xf3 /0 - invalid. */

/** Body for the vex group 17 instructions. */
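/* a_Instr is the lower-case mnemonic, used to pick the iemAImpl_<instr>_u32/_u64
   workers and their C fallbacks; all group 17 encodings take the destination in
   VEX.vvvv and the source in ModRM.rm. */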
#define IEMOP_BODY_By_Ey(a_Instr) \
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1) \
        return iemOp_InvalidWithRM(pVCpu, bRm); /* decode memory variant? */ \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        IEMOP_HLP_DONE_VEX_DECODING_L0(); \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint64_t *, pDst, 0); \
            IEM_MC_ARG(uint64_t, uSrc, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
                                                                 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint32_t *, pDst, 0); \
            IEM_MC_ARG(uint32_t, uSrc, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
                                                                 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint64_t *, pDst, 0); \
            IEM_MC_ARG(uint64_t, uSrc, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0(); \
            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
                                                                 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint32_t *, pDst, 0); \
            IEM_MC_ARG(uint32_t, uSrc, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0(); \
            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
                                                                 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0


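/* Each of the implemented group 17 instructions derives its result from a single
   source: BLSR clears the lowest set bit (dst = src & (src - 1)), BLSMSK builds a
   mask up to and including it (dst = src ^ (src - 1)), and BLSI isolates it
   (dst = src & -src). */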
/* Opcode VEX.F3.0F38 0xf3 /1. */
/** @opcode /1
 * @opmaps vexgrp17 */
FNIEMOP_DEF_1(iemOp_VGrp17_blsr_By_Ey, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(VEX_VM, BLSR, blsr, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_By_Ey(blsr);
}


/* Opcode VEX.F3.0F38 0xf3 /2. */
/** @opcode /2
 * @opmaps vexgrp17 */
FNIEMOP_DEF_1(iemOp_VGrp17_blsmsk_By_Ey, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(VEX_VM, BLSMSK, blsmsk, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_By_Ey(blsmsk);
}


/* Opcode VEX.F3.0F38 0xf3 /3. */
/** @opcode /3
 * @opmaps vexgrp17 */
FNIEMOP_DEF_1(iemOp_VGrp17_blsi_By_Ey, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(VEX_VM, BLSI, blsi, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_By_Ey(blsi);
}


/* Opcode VEX.F3.0F38 0xf3 /4 - invalid. */
/* Opcode VEX.F3.0F38 0xf3 /5 - invalid. */
/* Opcode VEX.F3.0F38 0xf3 /6 - invalid. */
/* Opcode VEX.F3.0F38 0xf3 /7 - invalid. */

/**
 * Group 17 jump table for the VEX.F3 variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup17_f3[] =
{
    /* /0 */ iemOp_InvalidWithRM,
    /* /1 */ iemOp_VGrp17_blsr_By_Ey,
    /* /2 */ iemOp_VGrp17_blsmsk_By_Ey,
    /* /3 */ iemOp_VGrp17_blsi_By_Ey,
    /* /4 */ iemOp_InvalidWithRM,
    /* /5 */ iemOp_InvalidWithRM,
    /* /6 */ iemOp_InvalidWithRM,
    /* /7 */ iemOp_InvalidWithRM
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup17_f3) == 8);

/** Opcode VEX.F3.0F38 0xf3 - invalid (vex only - group 17). */
FNIEMOP_DEF(iemOp_VGrp17_f3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnVexGroup17_f3[IEM_GET_MODRM_REG_8(bRm)], bRm);
}

/* Opcode VEX.F2.0F38 0xf3 - invalid (vex only - group 17). */


/* Opcode VEX.0F38 0xf4 - invalid. */
/* Opcode VEX.66.0F38 0xf4 - invalid. */
/* Opcode VEX.F3.0F38 0xf4 - invalid. */
/* Opcode VEX.F2.0F38 0xf4 - invalid. */

/** Body for BZHI, BEXTR, ++; assumes VEX.L must be 0. */
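/* a_fFeatureMember is the CPUID feature flag gating the instruction (fBmi2 for
   BZHI below); a_fUndefFlags names the EFLAGS bits left architecturally undefined.
   The destination is ModRM.reg, the first source ModRM.rm (register or memory)
   and the second source VEX.vvvv. */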
#define IEMOP_BODY_Gy_Ey_By(a_Instr, a_fFeatureMember, a_fUndefFlags) \
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeatureMember) \
        return iemOp_InvalidNeedRM(pVCpu); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fUndefFlags); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        IEMOP_HLP_DONE_VEX_DECODING_L0(); \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            IEM_MC_BEGIN(4, 0); \
            IEM_MC_ARG(uint64_t *, pDst, 0); \
            IEM_MC_ARG(uint64_t, uSrc1, 1); \
            IEM_MC_ARG(uint64_t, uSrc2, 2); \
            IEM_MC_ARG(uint32_t *, pEFlags, 3); \
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
                                                                 iemAImpl_ ## a_Instr ## _u64_fallback), \
                                     pDst, uSrc1, uSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(4, 0); \
            IEM_MC_ARG(uint32_t *, pDst, 0); \
            IEM_MC_ARG(uint32_t, uSrc1, 1); \
            IEM_MC_ARG(uint32_t, uSrc2, 2); \
            IEM_MC_ARG(uint32_t *, pEFlags, 3); \
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
                                                                 iemAImpl_ ## a_Instr ## _u32_fallback), \
                                     pDst, uSrc1, uSrc2, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            IEM_MC_BEGIN(4, 1); \
            IEM_MC_ARG(uint64_t *, pDst, 0); \
            IEM_MC_ARG(uint64_t, uSrc1, 1); \
            IEM_MC_ARG(uint64_t, uSrc2, 2); \
            IEM_MC_ARG(uint32_t *, pEFlags, 3); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0(); \
            IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
                                                                 iemAImpl_ ## a_Instr ## _u64_fallback), \
                                     pDst, uSrc1, uSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(4, 1); \
            IEM_MC_ARG(uint32_t *, pDst, 0); \
            IEM_MC_ARG(uint32_t, uSrc1, 1); \
            IEM_MC_ARG(uint32_t, uSrc2, 2); \
            IEM_MC_ARG(uint32_t *, pEFlags, 3); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0(); \
            IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
                                                                 iemAImpl_ ## a_Instr ## _u32_fallback), \
                                     pDst, uSrc1, uSrc2, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0

/** Body for SARX, SHLX, SHRX; assumes VEX.L must be 0. */
#define IEMOP_BODY_Gy_Ey_By_NoEflags(a_Instr, a_fFeatureMember, a_fUndefFlags) \
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeatureMember) \
        return iemOp_InvalidNeedRM(pVCpu); \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fUndefFlags); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        IEMOP_HLP_DONE_VEX_DECODING_L0(); \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint64_t *, pDst, 0); \
            IEM_MC_ARG(uint64_t, uSrc1, 1); \
            IEM_MC_ARG(uint64_t, uSrc2, 2); \
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
                                                                 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint32_t *, pDst, 0); \
            IEM_MC_ARG(uint32_t, uSrc1, 1); \
            IEM_MC_ARG(uint32_t, uSrc2, 2); \
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
                                                                 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint64_t *, pDst, 0); \
            IEM_MC_ARG(uint64_t, uSrc1, 1); \
            IEM_MC_ARG(uint64_t, uSrc2, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0(); \
            IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
                                                                 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint32_t *, pDst, 0); \
            IEM_MC_ARG(uint32_t, uSrc1, 1); \
            IEM_MC_ARG(uint32_t, uSrc2, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0(); \
            IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
                                                                 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0

/** Opcode VEX.0F38 0xf5 (vex only). */
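/* Note: bzhi copies the first source to the destination with every bit at and
   above the index given in the low byte of the second source zeroed. */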
FNIEMOP_DEF(iemOp_bzhi_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, BZHI, bzhi, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_Ey_By(bzhi, fBmi2, X86_EFL_AF | X86_EFL_PF);
}

/* Opcode VEX.66.0F38 0xf5 - invalid. */

/** Body for PDEP and PEXT (similar to ANDN, except no EFLAGS). */
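/* As with ANDN, the first source comes from VEX.vvvv and the second from
   ModRM.rm; a_fFeatureMember is the gating CPUID flag (fBmi2 for PEXT below). */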
1546#define IEMOP_BODY_Gy_By_Ey_NoEflags(a_Instr, a_fFeatureMember) \
1547 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeatureMember) \
1548 return iemOp_InvalidNeedRM(pVCpu); \
1549 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1550 if (IEM_IS_MODRM_REG_MODE(bRm)) \
1551 { \
1552 /* \
1553 * Register, register. \
1554 */ \
1555 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1556 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
1557 { \
1558 IEM_MC_BEGIN(3, 0); \
1559 IEM_MC_ARG(uint64_t *, pDst, 0); \
1560 IEM_MC_ARG(uint64_t, uSrc1, 1); \
1561 IEM_MC_ARG(uint64_t, uSrc2, 2); \
1562 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1563 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1564 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
1565 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
1566 iemAImpl_ ## a_Instr ## _u64, \
1567 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
1568 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1569 IEM_MC_END(); \
1570 } \
1571 else \
1572 { \
1573 IEM_MC_BEGIN(3, 0); \
1574 IEM_MC_ARG(uint32_t *, pDst, 0); \
1575 IEM_MC_ARG(uint32_t, uSrc1, 1); \
1576 IEM_MC_ARG(uint32_t, uSrc2, 2); \
1577 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1578 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1579 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
1580 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
1581 iemAImpl_ ## a_Instr ## _u32, \
1582 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
1583 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
1584 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1585 IEM_MC_END(); \
1586 } \
1587 } \
1588 else \
1589 { \
1590 /* \
1591 * Register, memory. \
1592 */ \
1593 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
1594 { \
1595 IEM_MC_BEGIN(3, 1); \
1596 IEM_MC_ARG(uint64_t *, pDst, 0); \
1597 IEM_MC_ARG(uint64_t, uSrc1, 1); \
1598 IEM_MC_ARG(uint64_t, uSrc2, 2); \
1599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1601 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1602 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1603 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1604 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1605 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
1606 iemAImpl_ ## a_Instr ## _u64, \
1607 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
1608 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1609 IEM_MC_END(); \
1610 } \
1611 else \
1612 { \
1613 IEM_MC_BEGIN(3, 1); \
1614 IEM_MC_ARG(uint32_t *, pDst, 0); \
1615 IEM_MC_ARG(uint32_t, uSrc1, 1); \
1616 IEM_MC_ARG(uint32_t, uSrc2, 2); \
1617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1619 IEMOP_HLP_DONE_VEX_DECODING_L0(); \
1620 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1621 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1622 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
1623 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
1624 iemAImpl_ ## a_Instr ## _u32, \
1625 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
1626 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
1627 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1628 IEM_MC_END(); \
1629 } \
1630 } \
1631 (void)0
1632
1633
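/*
 * Note on the body above (summary only): unlike the RMV body used by BZHI
 * and BEXTR, this RVM form routes VEX.vvvv to uSrc1 (By) and ModRM.rm to
 * uSrc2 (Ey), and since PDEP/PEXT leave EFLAGS untouched no undefined-flags
 * parameter is needed.
 */
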
/** Opcode VEX.F3.0F38 0xf5 (vex only). */
FNIEMOP_DEF(iemOp_pext_Gy_By_Ey)
{
    IEMOP_MNEMONIC3(VEX_RVM, PEXT, pext, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_By_Ey_NoEflags(pext, fBmi2);
}


/** Opcode VEX.F2.0F38 0xf5 (vex only). */
FNIEMOP_DEF(iemOp_pdep_Gy_By_Ey)
{
    IEMOP_MNEMONIC3(VEX_RVM, PDEP, pdep, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_By_Ey_NoEflags(pdep, fBmi2);
}
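
/*
 * Bit-level illustration (not used by the emulation; the real work is done
 * by iemAImpl_pext_u32/u64 and iemAImpl_pdep_u32/u64): PEXT gathers the
 * source bits selected by the mask into the low-order bits of the result,
 * while PDEP scatters the low-order source bits into the mask positions:
 *     pext(10110010b, 11110000b) -> 00001011b
 *     pdep(00001011b, 11110000b) -> 10110000b
 */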


/* Opcode VEX.0F38 0xf6 - invalid. */
/* Opcode VEX.66.0F38 0xf6 - invalid (legacy only). */
/* Opcode VEX.F3.0F38 0xf6 - invalid (legacy only). */


/** Opcode VEX.F2.0F38 0xf6 (vex only). */
FNIEMOP_DEF(iemOp_mulx_By_Gy_rDX_Ey)
{
    IEMOP_MNEMONIC4(VEX_RVM, MULX, mulx, Gy, By, Ey, rDX, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi2)
        return iemOp_InvalidNeedRM(pVCpu);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_L0();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_BEGIN(4, 0);
            IEM_MC_ARG(uint64_t *, pDst1, 0);
            IEM_MC_ARG(uint64_t *, pDst2, 1);
            IEM_MC_ARG(uint64_t, uSrc1, 2);
            IEM_MC_ARG(uint64_t, uSrc2, 3);
            IEM_MC_REF_GREG_U64(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
                                     pDst1, pDst2, uSrc1, uSrc2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 0);
            IEM_MC_ARG(uint32_t *, pDst1, 0);
            IEM_MC_ARG(uint32_t *, pDst2, 1);
            IEM_MC_ARG(uint32_t, uSrc1, 2);
            IEM_MC_ARG(uint32_t, uSrc2, 3);
            IEM_MC_REF_GREG_U32(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U32(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
                                     pDst1, pDst2, uSrc1, uSrc2);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst2);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst1);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_BEGIN(4, 1);
            IEM_MC_ARG(uint64_t *, pDst1, 0);
            IEM_MC_ARG(uint64_t *, pDst2, 1);
            IEM_MC_ARG(uint64_t, uSrc1, 2);
            IEM_MC_ARG(uint64_t, uSrc2, 3);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0();
            IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
            IEM_MC_REF_GREG_U64(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_GREG_U64(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
                                     pDst1, pDst2, uSrc1, uSrc2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 1);
            IEM_MC_ARG(uint32_t *, pDst1, 0);
            IEM_MC_ARG(uint32_t *, pDst2, 1);
            IEM_MC_ARG(uint32_t, uSrc1, 2);
            IEM_MC_ARG(uint32_t, uSrc2, 3);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0();
            IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
            IEM_MC_REF_GREG_U32(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_GREG_U32(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
                                     pDst1, pDst2, uSrc1, uSrc2);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst2);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst1);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
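
/*
 * Reference sketch (illustration only): MULX multiplies rDX (implicit) by
 * the r/m operand, unsigned and without touching EFLAGS; for the 64-bit
 * form the helper conceptually computes:
 *     uProduct = (unsigned __int128)uSrc1 * uSrc2;   - uSrc1 = RDX, uSrc2 = r/m
 *     *pDst2   = (uint64_t)uProduct;                 - low half  -> VEX.vvvv reg
 *     *pDst1   = (uint64_t)(uProduct >> 64);         - high half -> ModRM.reg
 * Architecturally, when ModRM.reg and VEX.vvvv name the same register it
 * ends up holding the high half of the product.
 */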


/** Opcode VEX.0F38 0xf7 (vex only). */
FNIEMOP_DEF(iemOp_bextr_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, BEXTR, bextr, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_Ey_By(bextr, fBmi1, X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
}
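
/*
 * Reference sketch (illustration only): BEXTR extracts a bit field whose
 * start and length are taken from the low 16 bits of the second source:
 *     uint8_t const iStart  = (uint8_t)uSrc2;
 *     uint8_t const cBits   = (uint8_t)(uSrc2 >> 8);
 *     uint64_t      uResult = iStart < 64 ? uSrc1 >> iStart : 0;
 *     if (cBits < 64)
 *         uResult &= RT_BIT_64(cBits) - 1;
 * ZF reflects the result; SF, AF and PF are undefined, matching the flags
 * passed to the body macro.
 */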


/** Opcode VEX.66.0F38 0xf7 (vex only). */
FNIEMOP_DEF(iemOp_shlx_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, SHLX, shlx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_Ey_By_NoEflags(shlx, fBmi2, 0);
}


/** Opcode VEX.F3.0F38 0xf7 (vex only). */
FNIEMOP_DEF(iemOp_sarx_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, SARX, sarx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_Ey_By_NoEflags(sarx, fBmi2, 0);
}


/** Opcode VEX.F2.0F38 0xf7 (vex only). */
FNIEMOP_DEF(iemOp_shrx_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, SHRX, shrx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_Ey_By_NoEflags(shrx, fBmi2, 0);
}
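
/*
 * Reference sketch (illustration only): SHLX, SARX and SHRX shift the r/m
 * operand (Ey) by the count supplied in VEX.vvvv (By) without reading or
 * writing EFLAGS; like the legacy shifts the count is masked to the operand
 * width, e.g. for the 64-bit left shift:
 *     uResult = uSrc1 << (uSrc2 & 63);   - the mask is 31 for the 32-bit forms
 * SARX shifts right arithmetically and SHRX logically; the flag-less
 * behaviour is what permits the NoEflags body variant.
 */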

/* Opcode VEX.0F38 0xf8 - invalid. */
/* Opcode VEX.66.0F38 0xf8 - invalid. */
/* Opcode VEX.F3.0F38 0xf8 - invalid. */
/* Opcode VEX.F2.0F38 0xf8 - invalid. */

/* Opcode VEX.0F38 0xf9 - invalid. */
/* Opcode VEX.66.0F38 0xf9 - invalid. */
/* Opcode VEX.F3.0F38 0xf9 - invalid. */
/* Opcode VEX.F2.0F38 0xf9 - invalid. */

/* Opcode VEX.0F38 0xfa - invalid. */
/* Opcode VEX.66.0F38 0xfa - invalid. */
/* Opcode VEX.F3.0F38 0xfa - invalid. */
/* Opcode VEX.F2.0F38 0xfa - invalid. */

/* Opcode VEX.0F38 0xfb - invalid. */
/* Opcode VEX.66.0F38 0xfb - invalid. */
/* Opcode VEX.F3.0F38 0xfb - invalid. */
/* Opcode VEX.F2.0F38 0xfb - invalid. */

/* Opcode VEX.0F38 0xfc - invalid. */
/* Opcode VEX.66.0F38 0xfc - invalid. */
/* Opcode VEX.F3.0F38 0xfc - invalid. */
/* Opcode VEX.F2.0F38 0xfc - invalid. */

/* Opcode VEX.0F38 0xfd - invalid. */
/* Opcode VEX.66.0F38 0xfd - invalid. */
/* Opcode VEX.F3.0F38 0xfd - invalid. */
/* Opcode VEX.F2.0F38 0xfd - invalid. */

/* Opcode VEX.0F38 0xfe - invalid. */
/* Opcode VEX.66.0F38 0xfe - invalid. */
/* Opcode VEX.F3.0F38 0xfe - invalid. */
/* Opcode VEX.F2.0F38 0xfe - invalid. */

/* Opcode VEX.0F38 0xff - invalid. */
/* Opcode VEX.66.0F38 0xff - invalid. */
/* Opcode VEX.F3.0F38 0xff - invalid. */
/* Opcode VEX.F2.0F38 0xff - invalid. */


/**
 * VEX opcode map \#2.
 *
 * @sa g_apfnThreeByte0f38
 */
IEM_STATIC const PFNIEMOP g_apfnVexMap2[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ iemOp_InvalidNeedRM, iemOp_vpshufb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x01 */ iemOp_InvalidNeedRM, iemOp_vphaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x02 */ iemOp_InvalidNeedRM, iemOp_vphaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x03 */ iemOp_InvalidNeedRM, iemOp_vphaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x04 */ iemOp_InvalidNeedRM, iemOp_vpmaddubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x05 */ iemOp_InvalidNeedRM, iemOp_vphsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x06 */ iemOp_InvalidNeedRM, iemOp_vphsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x07 */ iemOp_InvalidNeedRM, iemOp_vphsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x08 */ iemOp_InvalidNeedRM, iemOp_vpsignb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x09 */ iemOp_InvalidNeedRM, iemOp_vpsignw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0a */ iemOp_InvalidNeedRM, iemOp_vpsignd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0b */ iemOp_InvalidNeedRM, iemOp_vpmulhrsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0c */ iemOp_InvalidNeedRM, iemOp_vpermilps_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0d */ iemOp_InvalidNeedRM, iemOp_vpermilpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0e */ iemOp_InvalidNeedRM, iemOp_vtestps_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0f */ iemOp_InvalidNeedRM, iemOp_vtestpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x14 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x15 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x16 */ iemOp_InvalidNeedRM, iemOp_vpermps_Vqq_Hqq_Wqq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_InvalidNeedRM, iemOp_vptest_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ iemOp_InvalidNeedRM, iemOp_vbroadcastss_Vx_Wd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x19 */ iemOp_InvalidNeedRM, iemOp_vbroadcastsd_Vqq_Wq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x1a */ iemOp_InvalidNeedRM, iemOp_vbroadcastf128_Vqq_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */ iemOp_InvalidNeedRM, iemOp_vpabsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x1d */ iemOp_InvalidNeedRM, iemOp_vpabsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x1e */ iemOp_InvalidNeedRM, iemOp_vpabsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x21 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x22 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x23 */ iemOp_InvalidNeedRM, iemOp_vpmovsxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x24 */ iemOp_InvalidNeedRM, iemOp_vpmovsxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x25 */ iemOp_InvalidNeedRM, iemOp_vpmovsxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_InvalidNeedRM, iemOp_vpmuldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM, iemOp_vmovntdqa_Vx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2b */ iemOp_InvalidNeedRM, iemOp_vpackusdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_InvalidNeedRM, iemOp_vmaskmovps_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2d */ iemOp_InvalidNeedRM, iemOp_vmaskmovpd_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2e */ iemOp_InvalidNeedRM, iemOp_vmaskmovps_Mx_Hx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_InvalidNeedRM, iemOp_vmaskmovpd_Mx_Hx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x31 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x32 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x33 */ iemOp_InvalidNeedRM, iemOp_vpmovzxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x34 */ iemOp_InvalidNeedRM, iemOp_vpmovzxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x35 */ iemOp_InvalidNeedRM, iemOp_vpmovzxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x36 */ iemOp_InvalidNeedRM, iemOp_vpermd_Vqq_Hqq_Wqq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x37 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x38 */ iemOp_InvalidNeedRM, iemOp_vpminsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x39 */ iemOp_InvalidNeedRM, iemOp_vpminsd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3a */ iemOp_InvalidNeedRM, iemOp_vpminuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3b */ iemOp_InvalidNeedRM, iemOp_vpminud_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3c */ iemOp_InvalidNeedRM, iemOp_vpmaxsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3d */ iemOp_InvalidNeedRM, iemOp_vpmaxsd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3e */ iemOp_InvalidNeedRM, iemOp_vpmaxuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3f */ iemOp_InvalidNeedRM, iemOp_vpmaxud_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x40 */ iemOp_InvalidNeedRM, iemOp_vpmulld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x41 */ iemOp_InvalidNeedRM, iemOp_vphminposuw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ iemOp_InvalidNeedRM, iemOp_vpsrlvd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x46 */ iemOp_InvalidNeedRM, iemOp_vpsravd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x47 */ iemOp_InvalidNeedRM, iemOp_vpsllvd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x58 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x59 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x5a */ iemOp_InvalidNeedRM, iemOp_vbroadcasti128_Vqq_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x60 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x61 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x62 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x63 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x68 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x69 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x78 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ iemOp_InvalidNeedRM, iemOp_vpmaskmovd_q_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ iemOp_InvalidNeedRM, iemOp_vpmaskmovd_q_Mx_Vx_Hx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ iemOp_InvalidNeedRM, iemOp_vgatherdd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x91 */ iemOp_InvalidNeedRM, iemOp_vgatherqd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x92 */ iemOp_InvalidNeedRM, iemOp_vgatherdps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x93 */ iemOp_InvalidNeedRM, iemOp_vgatherqps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub132ps_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x97 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x98 */ iemOp_InvalidNeedRM, iemOp_vfmadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x99 */ iemOp_InvalidNeedRM, iemOp_vfmadd132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9a */ iemOp_InvalidNeedRM, iemOp_vfmsub132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9b */ iemOp_InvalidNeedRM, iemOp_vfmsub132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9c */ iemOp_InvalidNeedRM, iemOp_vfnmadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9d */ iemOp_InvalidNeedRM, iemOp_vfnmadd132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9e */ iemOp_InvalidNeedRM, iemOp_vfnmsub132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9f */ iemOp_InvalidNeedRM, iemOp_vfnmsub132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xa7 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xa8 */ iemOp_InvalidNeedRM, iemOp_vfmadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xa9 */ iemOp_InvalidNeedRM, iemOp_vfmadd213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xaa */ iemOp_InvalidNeedRM, iemOp_vfmsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xab */ iemOp_InvalidNeedRM, iemOp_vfmsub213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xac */ iemOp_InvalidNeedRM, iemOp_vfnmadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xad */ iemOp_InvalidNeedRM, iemOp_vfnmadd213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xae */ iemOp_InvalidNeedRM, iemOp_vfnmsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xaf */ iemOp_InvalidNeedRM, iemOp_vfnmsub213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xb7 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xb8 */ iemOp_InvalidNeedRM, iemOp_vfmadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xb9 */ iemOp_InvalidNeedRM, iemOp_vfmadd231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xba */ iemOp_InvalidNeedRM, iemOp_vfmsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbb */ iemOp_InvalidNeedRM, iemOp_vfmsub231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbc */ iemOp_InvalidNeedRM, iemOp_vfnmadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbd */ iemOp_InvalidNeedRM, iemOp_vfnmadd231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbe */ iemOp_InvalidNeedRM, iemOp_vfnmsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbf */ iemOp_InvalidNeedRM, iemOp_vfnmsub231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ iemOp_vsha1nexte_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc9 */ iemOp_vsha1msg1_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xca */ iemOp_vsha1msg2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xcb */ iemOp_vsha256rnds2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xcc */ iemOp_vsha256msg1_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xcd */ iemOp_vsha256msg2_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vaesimc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vaesenc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vaesenclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vaesdec_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vaesdeclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xf0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf2 */ iemOp_andn_Gy_By_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_VGrp17_f3, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf5 */ iemOp_bzhi_Gy_Ey_By, iemOp_InvalidNeedRM, iemOp_pext_Gy_By_Ey, iemOp_pdep_Gy_By_Ey,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_mulx_By_Gy_rDX_Ey,
    /* 0xf7 */ iemOp_bextr_Gy_Ey_By, iemOp_shlx_Gy_Ey_By, iemOp_sarx_Gy_Ey_By, iemOp_shrx_Gy_Ey_By,
    /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRM),
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap2) == 1024);
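
/*
 * Layout note: 256 opcode rows times 4 prefix columns give the 1024 entries
 * checked by the AssertCompile above, so a dispatcher can index the table
 * as, e.g. (assumed indexing, the actual decoder lives elsewhere):
 *     PFNIEMOP pfnOp = g_apfnVexMap2[(uintptr_t)bOpcode * 4 + idxPrefix];
 * with idxPrefix 0 for no prefix, 1 for 0x66, 2 for 0xf3 and 3 for 0xf2.
 */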

/** @} */
