VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap2.cpp.h @ 105318

Last change on this file since 105318 was 105307, checked in by vboxsync, 7 months ago

VMM/IEM: Implement vpermd, vpermps instruction emulations, bugref:9898

/* $Id: IEMAllInstVexMap2.cpp.h 105307 2024-07-12 13:39:37Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstThree0f38.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 2
 * @{
 */

/**
 * Common worker for AESNI/AVX instructions on the forms:
 *    - vaesxxx xmm0, xmm1, xmm2/mem128
 *
 * Exceptions type 4. AVX and AESNI cpuid check for 128-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAesNi_Vx_Hx_Wx, PFNIEMAIMPLMEDIAOPTF3U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX_2(fAvx, fAesNi);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT128U, puDst, uDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
        IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc1, puSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX_2(fAvx, fAesNi);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT128U, puDst, uDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));

        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc1, puSrc2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
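
/*
 * Illustrative sketch (hence the #if 0): a hypothetical opcode handler built
 * on the common worker above only has to supply the matching 128-bit helper.
 * The handler and helper names below follow this file's naming conventions
 * but are assumptions for illustration, not a quote of the real handlers.
 */
#if 0
FNIEMOP_DEF(iemOp_vaesenc_Vdq_Hdq_Wdq)
{
    IEMOP_MNEMONIC3(VEX_RVM, VAESENC, vaesenc, Vdq, Hdq, Wdq, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAesNi_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesenc_u128, iemAImpl_vaesenc_u128_fallback));
}
#endif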


/* Opcode VEX.0F38 0x00 - invalid. */


/** Opcode VEX.66.0F38 0x00. */
FNIEMOP_DEF(iemOp_vpshufb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSHUFB, vpshufb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpshufb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x01 - invalid. */


/** Opcode VEX.66.0F38 0x01. */
FNIEMOP_DEF(iemOp_vphaddw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDW, vphaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x02 - invalid. */


/** Opcode VEX.66.0F38 0x02. */
FNIEMOP_DEF(iemOp_vphaddd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDD, vphaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x03 - invalid. */


/** Opcode VEX.66.0F38 0x03. */
FNIEMOP_DEF(iemOp_vphaddsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDSW, vphaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x04 - invalid. */


/** Opcode VEX.66.0F38 0x04. */
FNIEMOP_DEF(iemOp_vpmaddubsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMADDUBSW, vpmaddubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmaddubsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x05 - invalid. */


/** Opcode VEX.66.0F38 0x05. */
FNIEMOP_DEF(iemOp_vphsubw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBW, vphsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x06 - invalid. */


/** Opcode VEX.66.0F38 0x06. */
FNIEMOP_DEF(iemOp_vphsubd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBD, vphsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x07 - invalid. */


/** Opcode VEX.66.0F38 0x07. */
FNIEMOP_DEF(iemOp_vphsubsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBSW, vphsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x08 - invalid. */


/** Opcode VEX.66.0F38 0x08. */
FNIEMOP_DEF(iemOp_vpsignb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGNB, vpsignb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x09 - invalid. */


/** Opcode VEX.66.0F38 0x09. */
FNIEMOP_DEF(iemOp_vpsignw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGNW, vpsignw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0a - invalid. */


/** Opcode VEX.66.0F38 0x0a. */
FNIEMOP_DEF(iemOp_vpsignd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGND, vpsignd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0b - invalid. */


/** Opcode VEX.66.0F38 0x0b. */
FNIEMOP_DEF(iemOp_vpmulhrsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULHRSW, vpmulhrsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmulhrsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0c - invalid. */


/** Opcode VEX.66.0F38 0x0c.
 * AVX,AVX - i.e. the AVX cpuid check applies to both the 128-bit and the 256-bit form. */
FNIEMOP_DEF(iemOp_vpermilps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPERMILPS, vpermilps, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
    IEMOPMEDIAOPTF3_INIT_VARS(vpermilps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0d - invalid. */


/** Opcode VEX.66.0F38 0x0d.
 * AVX,AVX - i.e. the AVX cpuid check applies to both the 128-bit and the 256-bit form. */
FNIEMOP_DEF(iemOp_vpermilpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPERMILPD, vpermilpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
    IEMOPMEDIAOPTF3_INIT_VARS(vpermilpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/**
 * Common worker for AVX instructions on the forms:
 *    - vtestps/d xmm1, xmm2/mem128
 *    - vtestps/d ymm1, ymm2/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
#define IEMOP_BODY_VTESTP_S_D(a_Instr) \
    Assert(pVCpu->iem.s.uVexLength <= 1); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
            IEM_MC_LOCAL(RTUINT256U, uSrc1); \
            IEM_MC_LOCAL(RTUINT256U, uSrc2); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puSrc1, puSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0); \
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), \
                                     puSrc1, puSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEM_MC_LOCAL(RTUINT256U, uSrc1); \
            IEM_MC_LOCAL(RTUINT256U, uSrc2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puSrc1, puSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEM_MC_LOCAL(RTUINT128U, uSrc2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), \
                                     puSrc1, puSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
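
/*
 * Illustrative sketch (hence the #if 0) of the 128-bit vtestps semantics the
 * macro above dispatches to: only the packed-single sign bits participate;
 * ZF/CF are computed and PF/AF/SF/OF are cleared.  The function name is made
 * up for illustration and is not the actual iemAImpl worker.
 */
#if 0
static void vtestpsRefU128(PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint32_t *pfEFlags)
{
    uint64_t const fSignMask = UINT64_C(0x8000000080000000);
    uint64_t const fAnd      = (( puSrc1->au64[0] & puSrc2->au64[0]) | ( puSrc1->au64[1] & puSrc2->au64[1])) & fSignMask;
    uint64_t const fAndN     = ((~puSrc1->au64[0] & puSrc2->au64[0]) | (~puSrc1->au64[1] & puSrc2->au64[1])) & fSignMask;
    uint32_t fEfl = *pfEFlags & ~(uint32_t)(X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_OF);
    if (!fAnd)  /* all sign bits of (src1 & src2) clear -> ZF */
        fEfl |= X86_EFL_ZF;
    if (!fAndN) /* all sign bits of (~src1 & src2) clear -> CF */
        fEfl |= X86_EFL_CF;
    *pfEFlags = fEfl;
}
#endif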


/* Opcode VEX.0F38 0x0e - invalid. */


/**
 * @opcode      0x0e
 * @oppfx       0x66
 * @opflmodify  cf,zf,pf,af,sf,of
 * @opflclear   pf,af,sf,of
 */
FNIEMOP_DEF(iemOp_vtestps_Vx_Wx)
{
    /** @todo We need to check VEX.W somewhere... it is documented to \#UD on all
     *        CPU modes. */
    IEMOP_MNEMONIC2(VEX_RM, VTESTPS, vtestps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_W_ZERO);
    IEMOP_BODY_VTESTP_S_D(vtestps);
}


/* Opcode VEX.0F38 0x0f - invalid. */


/**
 * @opcode      0x0f
 * @oppfx       0x66
 * @opflmodify  cf,zf,pf,af,sf,of
 * @opflclear   pf,af,sf,of
 */
FNIEMOP_DEF(iemOp_vtestpd_Vx_Wx)
{
    /** @todo We need to check VEX.W somewhere... it is documented to \#UD on all
     *        CPU modes. */
    IEMOP_MNEMONIC2(VEX_RM, VTESTPD, vtestpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_W_ZERO);
    IEMOP_BODY_VTESTP_S_D(vtestpd);
}


/* Opcode VEX.0F38 0x10 - invalid */
/* Opcode VEX.66.0F38 0x10 - invalid (legacy only). */
/* Opcode VEX.0F38 0x11 - invalid */
/* Opcode VEX.66.0F38 0x11 - invalid */
/* Opcode VEX.0F38 0x12 - invalid */
/* Opcode VEX.66.0F38 0x12 - invalid */
/* Opcode VEX.0F38 0x13 - invalid */
/* Opcode VEX.66.0F38 0x13 (vex only). */
FNIEMOP_STUB(iemOp_vcvtph2ps_Vx_Wx);
/* Opcode VEX.0F38 0x14 - invalid */
/* Opcode VEX.66.0F38 0x14 - invalid (legacy only). */
/* Opcode VEX.0F38 0x15 - invalid */
/* Opcode VEX.66.0F38 0x15 - invalid (legacy only). */
/* Opcode VEX.0F38 0x16 - invalid */


/** Opcode VEX.66.0F38 0x16. */
FNIEMOP_DEF(iemOp_vpermps_Vqq_Hqq_Wqq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
        IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermps_u256, iemAImpl_vpermps_u256_fallback),
                                 puDst, puSrc1, puSrc2);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermps_u256, iemAImpl_vpermps_u256_fallback),
                                 puDst, puSrc1, puSrc2);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
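
/*
 * Illustrative sketch (hence the #if 0) of what the vpermps helper computes:
 * each destination dword selects a source dword by the low three bits of the
 * corresponding index element.  The function name is made up for illustration;
 * a real implementation must also cope with puDst aliasing a source operand.
 */
#if 0
static void vpermpsRefU256(PRTUINT256U puDst, PCRTUINT256U puSrc1 /* indices */, PCRTUINT256U puSrc2 /* data */)
{
    for (unsigned i = 0; i < 8; i++)
        puDst->au32[i] = puSrc2->au32[puSrc1->au32[i] & 7]; /* only the low 3 index bits are used */
}
#endif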


/* Opcode VEX.0F38 0x17 - invalid */


/**
 * @opcode      0x17
 * @oppfx       0x66
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflclear   pf,af,sf,of
 */
FNIEMOP_DEF(iemOp_vptest_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback),
                                     puSrc1, puSrc2, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback),
                                     puSrc1, puSrc2, pEFlags);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
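
/*
 * Note: unlike vtestps/vtestpd above, vptest tests all 128/256 bits of the
 * operands: ZF := ((src2 & src1) == 0) and CF := ((src2 & ~src1) == 0), with
 * PF/AF/SF/OF cleared.
 */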


/* Opcode VEX.0F38 0x18 - invalid */


/** Opcode VEX.66.0F38 0x18. */
FNIEMOP_DEF(iemOp_vbroadcastss_Vx_Wd)
{
    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTSS, vbroadcastss, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint32_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint32_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
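
/*
 * Illustrative sketch (hence the #if 0) of the 256-bit broadcast effect: the
 * IEM_MC_BROADCAST_*_ZX_VLMAX statements above perform this replication (plus
 * the VLMAX zeroing for the 128-bit form).  Hypothetical helper name.
 */
#if 0
static void vbroadcastssRefU256(PRTUINT256U puDst, uint32_t uSrc)
{
    for (unsigned i = 0; i < 8; i++)
        puDst->au32[i] = uSrc; /* replicate the low dword to every lane */
}
#endif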


/* Opcode VEX.0F38 0x19 - invalid */


/** Opcode VEX.66.0F38 0x19. */
FNIEMOP_DEF(iemOp_vbroadcastsd_Vqq_Wq)
{
    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTSD, vbroadcastsd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.0F38 0x1a - invalid */


/** Opcode VEX.66.0F38 0x1a. */
FNIEMOP_DEF(iemOp_vbroadcastf128_Vqq_Mdq)
{
    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTF128, vbroadcastf128, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * No register, register.
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_BROADCAST_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.0F38 0x1b - invalid */
/* Opcode VEX.66.0F38 0x1b - invalid */
/* Opcode VEX.0F38 0x1c - invalid. */


/** Opcode VEX.66.0F38 0x1c. */
FNIEMOP_DEF(iemOp_vpabsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSB, vpabsb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x1d - invalid. */


/** Opcode VEX.66.0F38 0x1d. */
FNIEMOP_DEF(iemOp_vpabsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSW, vpabsw, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}

/* Opcode VEX.0F38 0x1e - invalid. */


/** Opcode VEX.66.0F38 0x1e. */
FNIEMOP_DEF(iemOp_vpabsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSD, vpabsd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x1f - invalid */
/* Opcode VEX.66.0F38 0x1f - invalid */


/** Body for the vpmov{s,z}x* instructions. */
#define IEMOP_BODY_VPMOV_S_Z(a_Instr, a_SrcWidth, a_VexLengthMemFetch) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); \
            IEM_MC_LOCAL(RTUINT256U, uDst); \
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puDst, puSrc); \
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); \
            IEM_MC_ARG(PRTUINT128U, puDst, 0); \
            IEM_MC_ARG(uint ## a_SrcWidth ##_t, uSrc, 1); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_XREG_U ## a_SrcWidth (uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0); \
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_## a_Instr ## _u128_fallback), \
                                     puDst, uSrc); \
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEM_MC_LOCAL(RTUINT256U, uDst); \
            IEM_MC_LOCAL(RTUINT128U, uSrc); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            a_VexLengthMemFetch(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puDst, puSrc); \
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG(PRTUINT128U, puDst, 0); \
            IEM_MC_ARG(uint ## a_SrcWidth ##_t, uSrc, 1); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_MEM_U ## a_SrcWidth (uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), \
                                     puDst, uSrc); \
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
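
/*
 * Illustrative sketch (hence the #if 0) of the narrowest widening case wired
 * up below (vpmovsxbw, 128-bit form): the low 8 bytes of the source are
 * sign-extended to 8 words; the vpmovzx* variants zero-extend instead.
 * Hypothetical function name, for illustration only.
 */
#if 0
static void vpmovsxbwRefU128(PRTUINT128U puDst, uint64_t uSrc)
{
    for (unsigned i = 0; i < 8; i++)
        puDst->au16[i] = (uint16_t)(int16_t)(int8_t)(uSrc >> (i * 8)); /* byte i -> sign-extended word i */
}
#endif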

/** Opcode VEX.66.0F38 0x20. */
FNIEMOP_DEF(iemOp_vpmovsxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBW, vpmovsxbw, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbw, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/** Opcode VEX.66.0F38 0x21. */
FNIEMOP_DEF(iemOp_vpmovsxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBD, vpmovsxbd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbd, 32, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x22. */
FNIEMOP_DEF(iemOp_vpmovsxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBQ, vpmovsxbq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbq, 16, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x23. */
FNIEMOP_DEF(iemOp_vpmovsxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXWD, vpmovsxwd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxwd, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/** Opcode VEX.66.0F38 0x24. */
FNIEMOP_DEF(iemOp_vpmovsxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXWQ, vpmovsxwq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxwq, 32, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x25. */
FNIEMOP_DEF(iemOp_vpmovsxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXDQ, vpmovsxdq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxdq, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/* Opcode VEX.66.0F38 0x26 - invalid */
/* Opcode VEX.66.0F38 0x27 - invalid */


/** Opcode VEX.66.0F38 0x28. */
FNIEMOP_DEF(iemOp_vpmuldq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULDQ, vpmuldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmuldq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x29. */
FNIEMOP_DEF(iemOp_vpcmpeqq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQQ, vpcmpeqq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpcmpeqq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


FNIEMOP_DEF(iemOp_vmovntdqa_Vx_Mx)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength == 0)
        {
            /**
             * @opcode      0x2a
             * @opcodesub   !11 mr/reg vex.l=0
             * @oppfx       0x66
             * @opcpuid     avx
             * @opgroup     og_avx_cachect
             * @opxcpttype  1
             * @optest      op1=-1 op2=2  -> op1=2
             * @optest      op1=0 op2=-42 -> op1=-42
             */
            /* 128-bit: Memory, register. */
            IEMOP_MNEMONIC2EX(vmovntdqa_Vdq_WO_Mdq_L0, "vmovntdqa, Vdq_WO, Mdq", VEX_RM_MEM, VMOVNTDQA, vmovntdqa, Vx_WO, Mx,
                              DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /**
             * @opdone
             * @opcode      0x2a
             * @opcodesub   !11 mr/reg vex.l=1
             * @oppfx       0x66
             * @opcpuid     avx2
             * @opgroup     og_avx2_cachect
             * @opxcpttype  1
             * @optest      op1=-1 op2=2  -> op1=2
             * @optest      op1=0 op2=-42 -> op1=-42
             */
            /* 256-bit: Memory, register. */
            IEMOP_MNEMONIC2EX(vmovntdqa_Vqq_WO_Mqq_L1, "vmovntdqa, Vqq_WO, Mqq", VEX_RM_MEM, VMOVNTDQA, vmovntdqa, Vx_WO, Mx,
                              DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f382arg
     * @opcode      0x2a
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
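
/*
 * Note: vmovntdqa requires a naturally aligned operand, which is why the
 * fetches above use the ALIGN_SSE/ALIGN_AVX variants (misalignment faults)
 * rather than the unaligned NO_AC fetches used elsewhere in this map; the
 * non-temporal cache hint itself is not modelled by the interpreter.
 */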


/** Opcode VEX.66.0F38 0x2b. */
FNIEMOP_DEF(iemOp_vpackusdw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPACKUSDW, vpackusdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpackusdw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x2c. */
FNIEMOP_DEF(iemOp_vmaskmovps_Vx_Hx_Mx)
{
//    IEMOP_MNEMONIC3(RM, VMASKMOVPS, vmaskmovps, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength)
        {
            /*
             * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
            IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);

            IEM_MC_END();
        }
        else
        {
            /*
             * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
            IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);

            IEM_MC_END();
        }
    }
    else
    {
        /* The register, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
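
/*
 * Illustrative sketch (hence the #if 0) of the 128-bit masked load performed
 * by the cImpl helper: a dword is loaded only when the sign bit of the
 * corresponding mask element is set, otherwise it reads as zero.  Fault
 * suppression for masked-off lanes is the helper's business and not shown.
 * Hypothetical function name, for illustration only.
 */
#if 0
static void vmaskmovpsLoadRefU128(PRTUINT128U puDst, PCRTUINT128U puMsk, PCRTUINT128U puSrc)
{
    for (unsigned i = 0; i < 4; i++)
        puDst->au32[i] = (puMsk->au32[i] & UINT32_C(0x80000000)) ? puSrc->au32[i] : 0;
}
#endif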


/** Opcode VEX.66.0F38 0x2d. */
FNIEMOP_DEF(iemOp_vmaskmovpd_Vx_Hx_Mx)
{
//    IEMOP_MNEMONIC3(RM, VMASKMOVPD, vmaskmovpd, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength)
        {
            /*
             * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
            IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);

            IEM_MC_END();
        }
        else
        {
            /*
             * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
            IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);

            IEM_MC_END();
        }
    }
    else
    {
        /* The register, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


/** Opcode VEX.66.0F38 0x2e. */
FNIEMOP_DEF(iemOp_vmaskmovps_Mx_Hx_Vx)
{
//    IEMOP_MNEMONIC3(RM, VMASKMOVPS, vmaskmovps, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength)
        {
            /*
             * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);

            IEM_MC_END();
        }
        else
        {
            /*
             * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
            IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);

            IEM_MC_END();
        }
    }
    else
    {
        /* The register, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


/** Opcode VEX.66.0F38 0x2f. */
FNIEMOP_DEF(iemOp_vmaskmovpd_Mx_Hx_Vx)
{
//    IEMOP_MNEMONIC3(RM, VMASKMOVPD, vmaskmovpd, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength)
        {
            /*
             * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);

            IEM_MC_END();
        }
        else
        {
            /*
             * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
            IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);

            IEM_MC_END();
        }
    }
    else
    {
        /* The register, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


/** Opcode VEX.66.0F38 0x30. */
FNIEMOP_DEF(iemOp_vpmovzxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBW, vpmovzxbw, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbw, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/** Opcode VEX.66.0F38 0x31. */
FNIEMOP_DEF(iemOp_vpmovzxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBD, vpmovzxbd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbd, 32, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x32. */
FNIEMOP_DEF(iemOp_vpmovzxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBQ, vpmovzxbq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbq, 16, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x33. */
FNIEMOP_DEF(iemOp_vpmovzxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXWD, vpmovzxwd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxwd, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/** Opcode VEX.66.0F38 0x34. */
FNIEMOP_DEF(iemOp_vpmovzxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXWQ, vpmovzxwq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxwq, 32, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x35. */
FNIEMOP_DEF(iemOp_vpmovzxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXDQ, vpmovzxdq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxdq, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/** Opcode VEX.66.0F38 0x36. */
FNIEMOP_DEF(iemOp_vpermd_Vqq_Hqq_Wqq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
        IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermd_u256, iemAImpl_vpermd_u256_fallback),
                                 puDst, puSrc1, puSrc2);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermd_u256, iemAImpl_vpermd_u256_fallback),
                                 puDst, puSrc1, puSrc2);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F38 0x37. */
FNIEMOP_DEF(iemOp_vpcmpgtq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTQ, vpcmpgtq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x38. */
FNIEMOP_DEF(iemOp_vpminsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINSB, vpminsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpminsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x39. */
FNIEMOP_DEF(iemOp_vpminsd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINSD, vpminsd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpminsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3a. */
FNIEMOP_DEF(iemOp_vpminuw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINUW, vpminuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpminuw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3b. */
FNIEMOP_DEF(iemOp_vpminud_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINUD, vpminud, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpminud);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3c. */
FNIEMOP_DEF(iemOp_vpmaxsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSB, vpmaxsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpmaxsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3d. */
FNIEMOP_DEF(iemOp_vpmaxsd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSD, vpmaxsd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpmaxsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3e. */
FNIEMOP_DEF(iemOp_vpmaxuw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXUW, vpmaxuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpmaxuw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3f. */
FNIEMOP_DEF(iemOp_vpmaxud_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXUD, vpmaxud, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpmaxud);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x40. */
FNIEMOP_DEF(iemOp_vpmulld_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULLD, vpmulld, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmulld);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x41. */
FNIEMOP_DEF(iemOp_vphminposuw_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(VEX_RM, VPHMINPOSUW, vphminposuw, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback),
                                 puDst, puSrc);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback),
                                 puDst, puSrc);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
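
/*
 * Illustrative sketch (hence the #if 0) of what the vphminposuw helper does:
 * the smallest of the 8 unsigned source words goes to word 0, its lowest index
 * to word 1, and the rest of the destination is zeroed.  Hypothetical name.
 */
#if 0
static void vphminposuwRefU128(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    uint16_t uMin = puSrc->au16[0];
    uint16_t iMin = 0;
    for (uint16_t i = 1; i < 8; i++)
        if (puSrc->au16[i] < uMin)
        {
            uMin = puSrc->au16[i];
            iMin = i;
        }
    puDst->au64[1] = 0;
    puDst->au64[0] = (uint64_t)uMin | ((uint64_t)iMin << 16); /* word 0 = minimum, word 1 = index, rest zero */
}
#endif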


/* Opcode VEX.66.0F38 0x42 - invalid. */
/* Opcode VEX.66.0F38 0x43 - invalid. */
/* Opcode VEX.66.0F38 0x44 - invalid. */


/** Opcode VEX.66.0F38 0x45. */
FNIEMOP_DEF(iemOp_vpsrlvd_q_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSRLVD, vpsrlvd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        IEMOPMEDIAOPTF3_INIT_VARS(vpsrlvq);
        return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
    }
    else
    {
        IEMOPMEDIAOPTF3_INIT_VARS(vpsrlvd);
        return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
    }
}
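
/*
 * Illustrative sketch (hence the #if 0) of the dword variant selected above:
 * each element is shifted right by its own per-element count, and counts of
 * 32 or more yield zero (the arithmetic vpsravd below instead saturates the
 * count and replicates the sign bit).  Hypothetical function name.
 */
#if 0
static void vpsrlvdRefU128(PRTUINT128U puDst, PCRTUINT128U puSrc1 /* data */, PCRTUINT128U puSrc2 /* counts */)
{
    for (unsigned i = 0; i < 4; i++)
        puDst->au32[i] = puSrc2->au32[i] < 32 ? puSrc1->au32[i] >> puSrc2->au32[i] : 0;
}
#endif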
1581
1582
1583/** Opcode VEX.66.0F38 0x46. */
1584FNIEMOP_DEF(iemOp_vpsravd_Vx_Hx_Wx)
1585{
1586 IEMOP_MNEMONIC3(VEX_RVM, VPSRAVD, vpsravd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1587 IEMOPMEDIAOPTF3_INIT_VARS(vpsravd);
1588 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1589}
1590
1591
1592/** Opcode VEX.66.0F38 0x47. */
1593FNIEMOP_DEF(iemOp_vpsllvd_q_Vx_Hx_Wx)
1594{
1595 IEMOP_MNEMONIC3(VEX_RVM, VPSLLVD, vpsllvd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1596
1597 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1598 {
1599 IEMOPMEDIAOPTF3_INIT_VARS(vpsllvq);
1600 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1601 }
1602 else
1603 {
1604 IEMOPMEDIAOPTF3_INIT_VARS(vpsllvd);
1605 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1606 }
1607}
1608
1609
1610/* Opcode VEX.66.0F38 0x48 - invalid. */
1611/* Opcode VEX.66.0F38 0x49 - invalid. */
1612/* Opcode VEX.66.0F38 0x4a - invalid. */
1613/* Opcode VEX.66.0F38 0x4b - invalid. */
1614/* Opcode VEX.66.0F38 0x4c - invalid. */
1615/* Opcode VEX.66.0F38 0x4d - invalid. */
1616/* Opcode VEX.66.0F38 0x4e - invalid. */
1617/* Opcode VEX.66.0F38 0x4f - invalid. */
1618
1619/* Opcode VEX.66.0F38 0x50 - invalid. */
1620/* Opcode VEX.66.0F38 0x51 - invalid. */
1621/* Opcode VEX.66.0F38 0x52 - invalid. */
1622/* Opcode VEX.66.0F38 0x53 - invalid. */
1623/* Opcode VEX.66.0F38 0x54 - invalid. */
1624/* Opcode VEX.66.0F38 0x55 - invalid. */
1625/* Opcode VEX.66.0F38 0x56 - invalid. */
1626/* Opcode VEX.66.0F38 0x57 - invalid. */
1627
1628
1629/** Opcode VEX.66.0F38 0x58. */
1630FNIEMOP_DEF(iemOp_vpbroadcastd_Vx_Wx)
1631{
1632 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTD, vpbroadcastd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1634 if (IEM_IS_MODRM_REG_MODE(bRm))
1635 {
1636 /*
1637 * Register, register.
1638 */
1639 if (pVCpu->iem.s.uVexLength)
1640 {
1641 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1642 IEM_MC_LOCAL(uint32_t, uSrc);
1643
1644 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1645 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1646 IEM_MC_PREPARE_AVX_USAGE();
1647
1648 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1649 IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1650
1651 IEM_MC_ADVANCE_RIP_AND_FINISH();
1652 IEM_MC_END();
1653 }
1654 else
1655 {
1656 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1657 IEM_MC_LOCAL(uint32_t, uSrc);
1658
1659 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1660 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1661 IEM_MC_PREPARE_AVX_USAGE();
1662 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1663 IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1664
1665 IEM_MC_ADVANCE_RIP_AND_FINISH();
1666 IEM_MC_END();
1667 }
1668 }
1669 else
1670 {
1671 /*
1672 * Register, memory.
1673 */
1674 if (pVCpu->iem.s.uVexLength)
1675 {
1676 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1677 IEM_MC_LOCAL(uint32_t, uSrc);
1678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1679
1680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1681 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1682 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1683 IEM_MC_PREPARE_AVX_USAGE();
1684
1685 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1686 IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1687
1688 IEM_MC_ADVANCE_RIP_AND_FINISH();
1689 IEM_MC_END();
1690 }
1691 else
1692 {
1693 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1694 IEM_MC_LOCAL(uint32_t, uSrc);
1695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1696
1697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1698 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1699 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1700 IEM_MC_PREPARE_AVX_USAGE();
1701
1702 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1703 IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1704
1705 IEM_MC_ADVANCE_RIP_AND_FINISH();
1706 IEM_MC_END();
1707 }
1708 }
1709}
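
/*
 * For reference, vpbroadcastd replicates the low dword of the source into
 * every element of the destination and zeroes everything above the vector
 * length -- roughly this illustrative C sketch (names made up, not the
 * code path above):
 *
 *      uint32_t const uSrc = puSrcReg->au32[0];
 *      for (unsigned i = 0; i < (fVexL1 ? 8 : 4); i++)
 *          puDstReg->au32[i] = uSrc;
 *
 * The vpbroadcastq/b/w forms below follow the same pattern for their
 * respective element sizes.
 */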
1710
1711
1712/** Opcode VEX.66.0F38 0x59. */
1713FNIEMOP_DEF(iemOp_vpbroadcastq_Vx_Wx)
1714{
1715 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTQ, vpbroadcastq, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1717 if (IEM_IS_MODRM_REG_MODE(bRm))
1718 {
1719 /*
1720 * Register, register.
1721 */
1722 if (pVCpu->iem.s.uVexLength)
1723 {
1724 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1725 IEM_MC_LOCAL(uint64_t, uSrc);
1726
1727 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1728 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1729 IEM_MC_PREPARE_AVX_USAGE();
1730
1731 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1732 IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1733
1734 IEM_MC_ADVANCE_RIP_AND_FINISH();
1735 IEM_MC_END();
1736 }
1737 else
1738 {
1739 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1740 IEM_MC_LOCAL(uint64_t, uSrc);
1741
1742 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1743 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1744 IEM_MC_PREPARE_AVX_USAGE();
1745 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1746 IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1747
1748 IEM_MC_ADVANCE_RIP_AND_FINISH();
1749 IEM_MC_END();
1750 }
1751 }
1752 else
1753 {
1754 /*
1755 * Register, memory.
1756 */
1757 if (pVCpu->iem.s.uVexLength)
1758 {
1759 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1760 IEM_MC_LOCAL(uint64_t, uSrc);
1761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1762
1763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1764 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1765 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1766 IEM_MC_PREPARE_AVX_USAGE();
1767
1768 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1769 IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1770
1771 IEM_MC_ADVANCE_RIP_AND_FINISH();
1772 IEM_MC_END();
1773 }
1774 else
1775 {
1776 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1777 IEM_MC_LOCAL(uint64_t, uSrc);
1778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1779
1780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1781 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1782 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1783 IEM_MC_PREPARE_AVX_USAGE();
1784
1785 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1786 IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1787
1788 IEM_MC_ADVANCE_RIP_AND_FINISH();
1789 IEM_MC_END();
1790 }
1791 }
1792}
1793
1794
1795/** Opcode VEX.66.0F38 0x5a. */
1796FNIEMOP_DEF(iemOp_vbroadcasti128_Vqq_Mdq)
1797{
1798 IEMOP_MNEMONIC2(VEX_RM, VBROADCASTI128, vbroadcasti128, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1800 if (IEM_IS_MODRM_REG_MODE(bRm))
1801 {
1802        /*
1803         * The register, register encoding is invalid.
1804         */
1805 IEMOP_RAISE_INVALID_OPCODE_RET();
1806 }
1807 else
1808 {
1809 /*
1810 * Register, memory.
1811 */
1812 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1813 IEM_MC_LOCAL(RTUINT128U, uSrc);
1814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1815
1816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1817        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
1818 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1819 IEM_MC_PREPARE_AVX_USAGE();
1820
1821 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1822 IEM_MC_BROADCAST_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1823
1824 IEM_MC_ADVANCE_RIP_AND_FINISH();
1825 IEM_MC_END();
1826 }
1827}
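
/*
 * Note that vbroadcasti128 only exists with a memory source and VEX.L=1;
 * a typical use loads 16 bytes once and duplicates them into both 128-bit
 * lanes, e.g. (assembly for illustration):
 *
 *      vbroadcasti128 ymm0, xmmword ptr [rax]  ; ymm0[127:0] = ymm0[255:128] = [rax]
 *
 * which is why the register, register form above raises an invalid opcode
 * exception.
 */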
1828
1829
1830/* Opcode VEX.66.0F38 0x5b - invalid. */
1831/* Opcode VEX.66.0F38 0x5c - invalid. */
1832/* Opcode VEX.66.0F38 0x5d - invalid. */
1833/* Opcode VEX.66.0F38 0x5e - invalid. */
1834/* Opcode VEX.66.0F38 0x5f - invalid. */
1835
1836/* Opcode VEX.66.0F38 0x60 - invalid. */
1837/* Opcode VEX.66.0F38 0x61 - invalid. */
1838/* Opcode VEX.66.0F38 0x62 - invalid. */
1839/* Opcode VEX.66.0F38 0x63 - invalid. */
1840/* Opcode VEX.66.0F38 0x64 - invalid. */
1841/* Opcode VEX.66.0F38 0x65 - invalid. */
1842/* Opcode VEX.66.0F38 0x66 - invalid. */
1843/* Opcode VEX.66.0F38 0x67 - invalid. */
1844/* Opcode VEX.66.0F38 0x68 - invalid. */
1845/* Opcode VEX.66.0F38 0x69 - invalid. */
1846/* Opcode VEX.66.0F38 0x6a - invalid. */
1847/* Opcode VEX.66.0F38 0x6b - invalid. */
1848/* Opcode VEX.66.0F38 0x6c - invalid. */
1849/* Opcode VEX.66.0F38 0x6d - invalid. */
1850/* Opcode VEX.66.0F38 0x6e - invalid. */
1851/* Opcode VEX.66.0F38 0x6f - invalid. */
1852
1853/* Opcode VEX.66.0F38 0x70 - invalid. */
1854/* Opcode VEX.66.0F38 0x71 - invalid. */
1855/* Opcode VEX.66.0F38 0x72 - invalid. */
1856/* Opcode VEX.66.0F38 0x73 - invalid. */
1857/* Opcode VEX.66.0F38 0x74 - invalid. */
1858/* Opcode VEX.66.0F38 0x75 - invalid. */
1859/* Opcode VEX.66.0F38 0x76 - invalid. */
1860/* Opcode VEX.66.0F38 0x77 - invalid. */
1861
1862
1863/** Opcode VEX.66.0F38 0x78. */
1864FNIEMOP_DEF(iemOp_vpbroadcastb_Vx_Wx)
1865{
1866 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTB, vpbroadcastb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1867 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1868 if (IEM_IS_MODRM_REG_MODE(bRm))
1869 {
1870 /*
1871 * Register, register.
1872 */
1873 if (pVCpu->iem.s.uVexLength)
1874 {
1875 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1876 IEM_MC_LOCAL(uint8_t, uSrc);
1877
1878 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1879 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1880 IEM_MC_PREPARE_AVX_USAGE();
1881
1882 IEM_MC_FETCH_XREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1883 IEM_MC_BROADCAST_YREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1884
1885 IEM_MC_ADVANCE_RIP_AND_FINISH();
1886 IEM_MC_END();
1887 }
1888 else
1889 {
1890 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1891 IEM_MC_LOCAL(uint8_t, uSrc);
1892
1893 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1894 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1895 IEM_MC_PREPARE_AVX_USAGE();
1896 IEM_MC_FETCH_XREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1897 IEM_MC_BROADCAST_XREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1898
1899 IEM_MC_ADVANCE_RIP_AND_FINISH();
1900 IEM_MC_END();
1901 }
1902 }
1903 else
1904 {
1905 /*
1906 * Register, memory.
1907 */
1908 if (pVCpu->iem.s.uVexLength)
1909 {
1910 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1911 IEM_MC_LOCAL(uint8_t, uSrc);
1912 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1913
1914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1915 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1916 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1917 IEM_MC_PREPARE_AVX_USAGE();
1918
1919 IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1920 IEM_MC_BROADCAST_YREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1921
1922 IEM_MC_ADVANCE_RIP_AND_FINISH();
1923 IEM_MC_END();
1924 }
1925 else
1926 {
1927 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1928 IEM_MC_LOCAL(uint8_t, uSrc);
1929 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1930
1931 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1932 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1933 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1934 IEM_MC_PREPARE_AVX_USAGE();
1935
1936 IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1937 IEM_MC_BROADCAST_XREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1938
1939 IEM_MC_ADVANCE_RIP_AND_FINISH();
1940 IEM_MC_END();
1941 }
1942 }
1943}
1944
1945
1946/** Opcode VEX.66.0F38 0x79. */
1947FNIEMOP_DEF(iemOp_vpbroadcastw_Vx_Wx)
1948{
1949 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTW, vpbroadcastw, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1951 if (IEM_IS_MODRM_REG_MODE(bRm))
1952 {
1953 /*
1954 * Register, register.
1955 */
1956 if (pVCpu->iem.s.uVexLength)
1957 {
1958 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1959 IEM_MC_LOCAL(uint16_t, uSrc);
1960
1961 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1962 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1963 IEM_MC_PREPARE_AVX_USAGE();
1964
1965 IEM_MC_FETCH_XREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1966 IEM_MC_BROADCAST_YREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1967
1968 IEM_MC_ADVANCE_RIP_AND_FINISH();
1969 IEM_MC_END();
1970 }
1971 else
1972 {
1973 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1974 IEM_MC_LOCAL(uint16_t, uSrc);
1975
1976 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1977 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1978 IEM_MC_PREPARE_AVX_USAGE();
1979 IEM_MC_FETCH_XREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1980 IEM_MC_BROADCAST_XREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1981
1982 IEM_MC_ADVANCE_RIP_AND_FINISH();
1983 IEM_MC_END();
1984 }
1985 }
1986 else
1987 {
1988 /*
1989 * Register, memory.
1990 */
1991 if (pVCpu->iem.s.uVexLength)
1992 {
1993 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1994 IEM_MC_LOCAL(uint16_t, uSrc);
1995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1996
1997 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1998 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1999 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2000 IEM_MC_PREPARE_AVX_USAGE();
2001
2002 IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2003 IEM_MC_BROADCAST_YREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2004
2005 IEM_MC_ADVANCE_RIP_AND_FINISH();
2006 IEM_MC_END();
2007 }
2008 else
2009 {
2010 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2011 IEM_MC_LOCAL(uint16_t, uSrc);
2012 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2013
2014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2015 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
2016 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2017 IEM_MC_PREPARE_AVX_USAGE();
2018
2019 IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2020 IEM_MC_BROADCAST_XREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2021
2022 IEM_MC_ADVANCE_RIP_AND_FINISH();
2023 IEM_MC_END();
2024 }
2025 }
2026}
2027
2028
2029/* Opcode VEX.66.0F38 0x7a - invalid. */
2030/* Opcode VEX.66.0F38 0x7b - invalid. */
2031/* Opcode VEX.66.0F38 0x7c - invalid. */
2032/* Opcode VEX.66.0F38 0x7d - invalid. */
2033/* Opcode VEX.66.0F38 0x7e - invalid. */
2034/* Opcode VEX.66.0F38 0x7f - invalid. */
2035
2036/* Opcode VEX.66.0F38 0x80 - invalid (legacy only). */
2037/* Opcode VEX.66.0F38 0x81 - invalid (legacy only). */
2038/* Opcode VEX.66.0F38 0x82 - invalid (legacy only). */
2039/* Opcode VEX.66.0F38 0x83 - invalid. */
2040/* Opcode VEX.66.0F38 0x84 - invalid. */
2041/* Opcode VEX.66.0F38 0x85 - invalid. */
2042/* Opcode VEX.66.0F38 0x86 - invalid. */
2043/* Opcode VEX.66.0F38 0x87 - invalid. */
2044/* Opcode VEX.66.0F38 0x88 - invalid. */
2045/* Opcode VEX.66.0F38 0x89 - invalid. */
2046/* Opcode VEX.66.0F38 0x8a - invalid. */
2047/* Opcode VEX.66.0F38 0x8b - invalid. */
2048
2049
2050/** Opcode VEX.66.0F38 0x8c. */
2051FNIEMOP_DEF(iemOp_vpmaskmovd_q_Vx_Hx_Mx)
2052{
2053 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2054 if (!IEM_IS_MODRM_REG_MODE(bRm))
2055 {
2056 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2057 {
2058 // IEMOP_MNEMONIC3(RM, VPMASKMOVQ, vpmaskmovq, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
2059 if (pVCpu->iem.s.uVexLength)
2060 {
2061 /*
2062 * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
2063 */
2064 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2065 IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
2066 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
2067 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
2068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2069 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
2070 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2071
2072 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2073 IEM_MC_PREPARE_AVX_USAGE();
2074
2075 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);
2076
2077 IEM_MC_END();
2078 }
2079 else
2080 {
2081 /*
2082 * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
2083 */
2084 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2085 IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
2086 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
2087 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
2088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2089 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
2090 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2091
2092 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2093 IEM_MC_PREPARE_AVX_USAGE();
2094
2095 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);
2096
2097 IEM_MC_END();
2098 }
2099 }
2100 else
2101 {
2102 // IEMOP_MNEMONIC3(RM, VPMASKMOVD, vpmaskmovd, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
2103 if (pVCpu->iem.s.uVexLength)
2104 {
2105 /*
2106 * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
2107 */
2108 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2109 IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
2110 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
2111 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
2112 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2113 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
2114 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2115
2116 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2117 IEM_MC_PREPARE_AVX_USAGE();
2118
2119 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);
2120
2121 IEM_MC_END();
2122 }
2123 else
2124 {
2125 /*
2126 * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
2127 */
2128 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2129 IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
2130 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
2131 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
2132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2133 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
2134 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2135
2136 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2137 IEM_MC_PREPARE_AVX_USAGE();
2138
2139 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);
2140
2141 IEM_MC_END();
2142 }
2143 }
2144 }
2145 else
2146 {
2147 /* The register, register encoding is invalid. */
2148 IEMOP_RAISE_INVALID_OPCODE_RET();
2149 }
2150}
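
/*
 * Reference behaviour of the masked load above, as an illustrative C
 * sketch only (the real work happens in the iemCImpl_vpmaskmov*_load
 * workers; variable names are made up): an element is fetched only when
 * the sign bit of the corresponding mask element is set, otherwise it is
 * zeroed.  For the dword form:
 *
 *      for (unsigned i = 0; i < cElems; i++)
 *          puDst->au32[i] = (puMsk->au32[i] >> 31) ? pu32Mem[i] : 0;
 */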
2151
2152
2153/* Opcode VEX.66.0F38 0x8d - invalid. */
2154/** Opcode VEX.66.0F38 0x8e. */
2155
2156
2157/** Opcode VEX.66.0F38 0x8e. */
2158FNIEMOP_DEF(iemOp_vpmaskmovd_q_Mx_Vx_Hx)
2159{
2160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2161 if (!IEM_IS_MODRM_REG_MODE(bRm))
2162 {
2163 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2164 {
2165 // IEMOP_MNEMONIC3(RM, VPMASKMOVQ, vpmaskmovq, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
2166 if (pVCpu->iem.s.uVexLength)
2167 {
2168 /*
2169 * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
2170 */
2171 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2172
2173 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
2174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2175 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
2176 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2177 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
2178 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
2179
2180 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2181 IEM_MC_PREPARE_AVX_USAGE();
2182
2183 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);
2184
2185 IEM_MC_END();
2186 }
2187 else
2188 {
2189 /*
2190 * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
2191 */
2192 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2193
2194 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
2195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2196 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
2197 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2198 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
2199 IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
2200
2201 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2202 IEM_MC_PREPARE_AVX_USAGE();
2203
2204 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);
2205
2206 IEM_MC_END();
2207 }
2208 }
2209 else
2210 {
2211 // IEMOP_MNEMONIC3(RM, VPMASKMOVD, vpmaskmovd, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
2212 if (pVCpu->iem.s.uVexLength)
2213 {
2214 /*
2215 * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
2216 */
2217 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2218
2219 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
2220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2221 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
2222 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2223 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
2224 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
2225
2226 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2227 IEM_MC_PREPARE_AVX_USAGE();
2228
2229 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);
2230
2231 IEM_MC_END();
2232 }
2233 else
2234 {
2235 /*
2236 * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
2237 */
2238 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2239
2240 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
2241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2242 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
2243 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2244 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
2245 IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
2246
2247 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2248 IEM_MC_PREPARE_AVX_USAGE();
2249
2250 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);
2251
2252 IEM_MC_END();
2253 }
2254 }
2255 }
2256 else
2257 {
2258 /* The register, register encoding is invalid. */
2259 IEMOP_RAISE_INVALID_OPCODE_RET();
2260 }
2261}
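
/*
 * The store form is the mirror image -- again an illustrative C sketch
 * only (the real work happens in the iemCImpl_vpmaskmov*_store workers):
 * elements whose mask sign bit is clear leave the corresponding memory
 * untouched.  For the dword form:
 *
 *      for (unsigned i = 0; i < cElems; i++)
 *          if (puMsk->au32[i] >> 31)
 *              pu32Mem[i] = puSrc->au32[i];
 */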
2262
2263
2264/* Opcode VEX.66.0F38 0x8f - invalid. */
2265
2266/** Opcode VEX.66.0F38 0x90 (vex only). */
2267FNIEMOP_STUB(iemOp_vpgatherdd_q_Vx_Hx_Wx);
2268/** Opcode VEX.66.0F38 0x91 (vex only). */
2269FNIEMOP_STUB(iemOp_vpgatherqd_q_Vx_Hx_Wx);
2270/** Opcode VEX.66.0F38 0x92 (vex only). */
2271FNIEMOP_STUB(iemOp_vgatherdps_d_Vx_Hx_Wx);
2272/** Opcode VEX.66.0F38 0x93 (vex only). */
2273FNIEMOP_STUB(iemOp_vgatherqps_d_Vx_Hx_Wx);
2274/* Opcode VEX.66.0F38 0x94 - invalid. */
2275/* Opcode VEX.66.0F38 0x95 - invalid. */
2276/** Opcode VEX.66.0F38 0x96 (vex only). */
2277FNIEMOP_STUB(iemOp_vfmaddsub132ps_d_Vx_Hx_Wx);
2278/** Opcode VEX.66.0F38 0x97 (vex only). */
2279FNIEMOP_STUB(iemOp_vfmsubadd132ps_d_Vx_Hx_Wx);
2280/** Opcode VEX.66.0F38 0x98 (vex only). */
2281FNIEMOP_STUB(iemOp_vfmadd132ps_d_Vx_Hx_Wx);
2282/** Opcode VEX.66.0F38 0x99 (vex only). */
2283FNIEMOP_STUB(iemOp_vfmadd132ss_d_Vx_Hx_Wx);
2284/** Opcode VEX.66.0F38 0x9a (vex only). */
2285FNIEMOP_STUB(iemOp_vfmsub132ps_d_Vx_Hx_Wx);
2286/** Opcode VEX.66.0F38 0x9b (vex only). */
2287FNIEMOP_STUB(iemOp_vfmsub132ss_d_Vx_Hx_Wx);
2288/** Opcode VEX.66.0F38 0x9c (vex only). */
2289FNIEMOP_STUB(iemOp_vfnmadd132ps_d_Vx_Hx_Wx);
2290/** Opcode VEX.66.0F38 0x9d (vex only). */
2291FNIEMOP_STUB(iemOp_vfnmadd132ss_d_Vx_Hx_Wx);
2292/** Opcode VEX.66.0F38 0x9e (vex only). */
2293FNIEMOP_STUB(iemOp_vfnmsub132ps_d_Vx_Hx_Wx);
2294/** Opcode VEX.66.0F38 0x9f (vex only). */
2295FNIEMOP_STUB(iemOp_vfnmsub132ss_d_Vx_Hx_Wx);
2296
2297/* Opcode VEX.66.0F38 0xa0 - invalid. */
2298/* Opcode VEX.66.0F38 0xa1 - invalid. */
2299/* Opcode VEX.66.0F38 0xa2 - invalid. */
2300/* Opcode VEX.66.0F38 0xa3 - invalid. */
2301/* Opcode VEX.66.0F38 0xa4 - invalid. */
2302/* Opcode VEX.66.0F38 0xa5 - invalid. */
2303/** Opcode VEX.66.0F38 0xa6 (vex only). */
2304FNIEMOP_STUB(iemOp_vfmaddsub213ps_d_Vx_Hx_Wx);
2305/** Opcode VEX.66.0F38 0xa7 (vex only). */
2306FNIEMOP_STUB(iemOp_vfmsubadd213ps_d_Vx_Hx_Wx);
2307/** Opcode VEX.66.0F38 0xa8 (vex only). */
2308FNIEMOP_STUB(iemOp_vfmadd213ps_d_Vx_Hx_Wx);
2309/** Opcode VEX.66.0F38 0xa9 (vex only). */
2310FNIEMOP_STUB(iemOp_vfmadd213ss_d_Vx_Hx_Wx);
2311/** Opcode VEX.66.0F38 0xaa (vex only). */
2312FNIEMOP_STUB(iemOp_vfmsub213ps_d_Vx_Hx_Wx);
2313/** Opcode VEX.66.0F38 0xab (vex only). */
2314FNIEMOP_STUB(iemOp_vfmsub213ss_d_Vx_Hx_Wx);
2315/** Opcode VEX.66.0F38 0xac (vex only). */
2316FNIEMOP_STUB(iemOp_vfnmadd213ps_d_Vx_Hx_Wx);
2317/** Opcode VEX.66.0F38 0xad (vex only). */
2318FNIEMOP_STUB(iemOp_vfnmadd213ss_d_Vx_Hx_Wx);
2319/** Opcode VEX.66.0F38 0xae (vex only). */
2320FNIEMOP_STUB(iemOp_vfnmsub213ps_d_Vx_Hx_Wx);
2321/** Opcode VEX.66.0F38 0xaf (vex only). */
2322FNIEMOP_STUB(iemOp_vfnmsub213ss_d_Vx_Hx_Wx);
2323
2324/* Opcode VEX.66.0F38 0xb0 - invalid. */
2325/* Opcode VEX.66.0F38 0xb1 - invalid. */
2326/* Opcode VEX.66.0F38 0xb2 - invalid. */
2327/* Opcode VEX.66.0F38 0xb3 - invalid. */
2328/* Opcode VEX.66.0F38 0xb4 - invalid. */
2329/* Opcode VEX.66.0F38 0xb5 - invalid. */
2330/** Opcode VEX.66.0F38 0xb6 (vex only). */
2331FNIEMOP_STUB(iemOp_vfmaddsub231ps_d_Vx_Hx_Wx);
2332/** Opcode VEX.66.0F38 0xb7 (vex only). */
2333FNIEMOP_STUB(iemOp_vfmsubadd231ps_d_Vx_Hx_Wx);
2334/** Opcode VEX.66.0F38 0xb8 (vex only). */
2335FNIEMOP_STUB(iemOp_vfmadd231ps_d_Vx_Hx_Wx);
2336/** Opcode VEX.66.0F38 0xb9 (vex only). */
2337FNIEMOP_STUB(iemOp_vfmadd231ss_d_Vx_Hx_Wx);
2338/** Opcode VEX.66.0F38 0xba (vex only). */
2339FNIEMOP_STUB(iemOp_vfmsub231ps_d_Vx_Hx_Wx);
2340/** Opcode VEX.66.0F38 0xbb (vex only). */
2341FNIEMOP_STUB(iemOp_vfmsub231ss_d_Vx_Hx_Wx);
2342/** Opcode VEX.66.0F38 0xbc (vex only). */
2343FNIEMOP_STUB(iemOp_vfnmadd231ps_d_Vx_Hx_Wx);
2344/** Opcode VEX.66.0F38 0xbd (vex only). */
2345FNIEMOP_STUB(iemOp_vfnmadd231ss_d_Vx_Hx_Wx);
2346/** Opcode VEX.66.0F38 0xbe (vex only). */
2347FNIEMOP_STUB(iemOp_vfnmsub231ps_d_Vx_Hx_Wx);
2348/** Opcode VEX.66.0F38 0xbf (vex only). */
2349FNIEMOP_STUB(iemOp_vfnmsub231ss_d_Vx_Hx_Wx);
2350
2351/* Opcode VEX.0F38 0xc0 - invalid. */
2352/* Opcode VEX.66.0F38 0xc0 - invalid. */
2353/* Opcode VEX.0F38 0xc1 - invalid. */
2354/* Opcode VEX.66.0F38 0xc1 - invalid. */
2355/* Opcode VEX.0F38 0xc2 - invalid. */
2356/* Opcode VEX.66.0F38 0xc2 - invalid. */
2357/* Opcode VEX.0F38 0xc3 - invalid. */
2358/* Opcode VEX.66.0F38 0xc3 - invalid. */
2359/* Opcode VEX.0F38 0xc4 - invalid. */
2360/* Opcode VEX.66.0F38 0xc4 - invalid. */
2361/* Opcode VEX.0F38 0xc5 - invalid. */
2362/* Opcode VEX.66.0F38 0xc5 - invalid. */
2363/* Opcode VEX.0F38 0xc6 - invalid. */
2364/* Opcode VEX.66.0F38 0xc6 - invalid. */
2365/* Opcode VEX.0F38 0xc7 - invalid. */
2366/* Opcode VEX.66.0F38 0xc7 - invalid. */
2367/* Opcode VEX.0F38 0xc8 - invalid. */
2368/* Opcode VEX.66.0F38 0xc8 - invalid. */
2369/* Opcode VEX.0F38 0xc9 - invalid. */
2370/* Opcode VEX.66.0F38 0xc9 - invalid. */
2371/* Opcode VEX.0F38 0xca - invalid. */
2372/* Opcode VEX.66.0F38 0xca - invalid. */
2373/* Opcode VEX.0F38 0xcb - invalid. */
2374/* Opcode VEX.66.0F38 0xcb - invalid. */
2375/* Opcode VEX.0F38 0xcc - invalid. */
2376/* Opcode VEX.66.0F38 0xcc - invalid. */
2377/* Opcode VEX.0F38 0xcd - invalid. */
2378/* Opcode VEX.66.0F38 0xcd - invalid. */
2379/* Opcode VEX.0F38 0xce - invalid. */
2380/* Opcode VEX.66.0F38 0xce - invalid. */
2381/* Opcode VEX.0F38 0xcf - invalid. */
2382/* Opcode VEX.66.0F38 0xcf - invalid. */
2383
2384/* Opcode VEX.66.0F38 0xd0 - invalid. */
2385/* Opcode VEX.66.0F38 0xd1 - invalid. */
2386/* Opcode VEX.66.0F38 0xd2 - invalid. */
2387/* Opcode VEX.66.0F38 0xd3 - invalid. */
2388/* Opcode VEX.66.0F38 0xd4 - invalid. */
2389/* Opcode VEX.66.0F38 0xd5 - invalid. */
2390/* Opcode VEX.66.0F38 0xd6 - invalid. */
2391/* Opcode VEX.66.0F38 0xd7 - invalid. */
2392/* Opcode VEX.66.0F38 0xd8 - invalid. */
2393/* Opcode VEX.66.0F38 0xd9 - invalid. */
2394/* Opcode VEX.66.0F38 0xda - invalid. */
2395
2396
2397/** Opcode VEX.66.0F38 0xdb. */
2398FNIEMOP_DEF(iemOp_vaesimc_Vdq_Wdq)
2399{
2400 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2401 if (IEM_IS_MODRM_REG_MODE(bRm))
2402 {
2403 /*
2404 * Register, register.
2405 */
2406 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2407 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(fAvx, fAesNi);
2408 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2409 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2410 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2411 IEM_MC_PREPARE_AVX_USAGE();
2412 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2413 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2414 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesimc_u128, iemAImpl_vaesimc_u128_fallback), puDst, puSrc);
2415 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2416 IEM_MC_ADVANCE_RIP_AND_FINISH();
2417 IEM_MC_END();
2418 }
2419 else
2420 {
2421 /*
2422 * Register, memory.
2423 */
2424 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2425 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2426 IEM_MC_LOCAL(RTUINT128U, uSrc);
2427 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2429
2430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2431 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(fAvx, fAesNi);
2432        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2433        IEM_MC_PREPARE_AVX_USAGE();
2434        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2435
2436 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2437 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesimc_u128, iemAImpl_vaesimc_u128_fallback), puDst, puSrc);
2438 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2439 IEM_MC_ADVANCE_RIP_AND_FINISH();
2440 IEM_MC_END();
2441 }
2442}
2443
2444
2445/** Opcode VEX.66.0F38 0xdc. */
2446FNIEMOP_DEF(iemOp_vaesenc_Vdq_Wdq)
2447{
2448 IEMOP_MNEMONIC3(VEX_RVM, VAESENC, vaesenc, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2449 return FNIEMOP_CALL_1(iemOpCommonAvxAesNi_Vx_Hx_Wx,
2450 IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesenc_u128, iemAImpl_vaesenc_u128_fallback)); /* ASSUMES fAesNi on the host implies fAvx. */
2451}
2452
2453
2454/** Opcode VEX.66.0F38 0xdd. */
2455FNIEMOP_DEF(iemOp_vaesenclast_Vdq_Wdq)
2456{
2457 IEMOP_MNEMONIC3(VEX_RVM, VAESENCLAST, vaesenclast, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2458 return FNIEMOP_CALL_1(iemOpCommonAvxAesNi_Vx_Hx_Wx,
2459 IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesenclast_u128, iemAImpl_vaesenclast_u128_fallback)); /* ASSUMES fAesNi on the host implies fAvx. */
2460}
2461
2462
2463/** Opcode VEX.66.0F38 0xde. */
2464FNIEMOP_DEF(iemOp_vaesdec_Vdq_Wdq)
2465{
2466 IEMOP_MNEMONIC3(VEX_RVM, VAESDEC, vaesdec, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2467 return FNIEMOP_CALL_1(iemOpCommonAvxAesNi_Vx_Hx_Wx,
2468 IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesdec_u128, iemAImpl_vaesdec_u128_fallback)); /* ASSUMES fAesNi on the host implies fAvx. */
2469}
2470
2471
2472/** Opcode VEX.66.0F38 0xdf. */
2473FNIEMOP_DEF(iemOp_vaesdeclast_Vdq_Wdq)
2474{
2475 IEMOP_MNEMONIC3(VEX_RVM, VAESDECLAST, vaesdeclast, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2476 return FNIEMOP_CALL_1(iemOpCommonAvxAesNi_Vx_Hx_Wx,
2477 IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesdeclast_u128, iemAImpl_vaesdeclast_u128_fallback)); /* ASSUMES fAesNi on the host implies fAvx. */
2478}
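
/*
 * For reference: vaesenc runs one full AES round on the first source
 * state (ShiftRows, SubBytes, MixColumns, then XOR with the round key
 * from the second source), vaesenclast is the final round without
 * MixColumns, vaesdec/vaesdeclast apply the inverse transformations, and
 * vaesimc above converts an encryption round key for the decryption key
 * schedule (InvMixColumns).
 */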
2479
2480
2481/* Opcode VEX.66.0F38 0xe0 - invalid. */
2482/* Opcode VEX.66.0F38 0xe1 - invalid. */
2483/* Opcode VEX.66.0F38 0xe2 - invalid. */
2484/* Opcode VEX.66.0F38 0xe3 - invalid. */
2485/* Opcode VEX.66.0F38 0xe4 - invalid. */
2486/* Opcode VEX.66.0F38 0xe5 - invalid. */
2487/* Opcode VEX.66.0F38 0xe6 - invalid. */
2488/* Opcode VEX.66.0F38 0xe7 - invalid. */
2489/* Opcode VEX.66.0F38 0xe8 - invalid. */
2490/* Opcode VEX.66.0F38 0xe9 - invalid. */
2491/* Opcode VEX.66.0F38 0xea - invalid. */
2492/* Opcode VEX.66.0F38 0xeb - invalid. */
2493/* Opcode VEX.66.0F38 0xec - invalid. */
2494/* Opcode VEX.66.0F38 0xed - invalid. */
2495/* Opcode VEX.66.0F38 0xee - invalid. */
2496/* Opcode VEX.66.0F38 0xef - invalid. */
2497
2498
2499/* Opcode VEX.0F38 0xf0 - invalid (legacy only). */
2500/* Opcode VEX.66.0F38 0xf0 - invalid (legacy only). */
2501/* Opcode VEX.F3.0F38 0xf0 - invalid. */
2502/* Opcode VEX.F2.0F38 0xf0 - invalid (legacy only). */
2503
2504/* Opcode VEX.0F38 0xf1 - invalid (legacy only). */
2505/* Opcode VEX.66.0F38 0xf1 - invalid (legacy only). */
2506/* Opcode VEX.F3.0F38 0xf1 - invalid. */
2507/* Opcode VEX.F2.0F38 0xf1 - invalid (legacy only). */
2508
2509/**
2510 * @opcode 0xf2
2511 * @oppfx none
2512 * @opflmodify cf,pf,af,zf,sf,of
2513 * @opflclear cf,of
2514 * @opflundef pf,af
2515 * @note VEX only
2516 */
2517FNIEMOP_DEF(iemOp_andn_Gy_By_Ey)
2518{
2519 IEMOP_MNEMONIC3(VEX_RVM, ANDN, andn, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2520 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2521 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF);
2522 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2523 if (IEM_IS_MODRM_REG_MODE(bRm))
2524 {
2525 /*
2526 * Register, register.
2527 */
2528 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2529 {
2530            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2531 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
2532 IEM_MC_ARG(uint64_t *, pDst, 0);
2533 IEM_MC_ARG(uint64_t, uSrc1, 1);
2534 IEM_MC_ARG(uint64_t, uSrc2, 2);
2535 IEM_MC_ARG(uint32_t *, pEFlags, 3);
2536 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2537 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2538 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2539 IEM_MC_REF_EFLAGS(pEFlags);
2540 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u64, iemAImpl_andn_u64_fallback),
2541 pDst, uSrc1, uSrc2, pEFlags);
2542 IEM_MC_ADVANCE_RIP_AND_FINISH();
2543 IEM_MC_END();
2544 }
2545 else
2546 {
2547 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2548 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
2549 IEM_MC_ARG(uint32_t *, pDst, 0);
2550 IEM_MC_ARG(uint32_t, uSrc1, 1);
2551 IEM_MC_ARG(uint32_t, uSrc2, 2);
2552 IEM_MC_ARG(uint32_t *, pEFlags, 3);
2553 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2554 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2555 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2556 IEM_MC_REF_EFLAGS(pEFlags);
2557 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u32, iemAImpl_andn_u32_fallback),
2558 pDst, uSrc1, uSrc2, pEFlags);
2559 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2560 IEM_MC_ADVANCE_RIP_AND_FINISH();
2561 IEM_MC_END();
2562 }
2563 }
2564 else
2565 {
2566 /*
2567 * Register, memory.
2568 */
2569 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2570 {
2571            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2572 IEM_MC_ARG(uint64_t *, pDst, 0);
2573 IEM_MC_ARG(uint64_t, uSrc1, 1);
2574 IEM_MC_ARG(uint64_t, uSrc2, 2);
2575 IEM_MC_ARG(uint32_t *, pEFlags, 3);
2576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2578 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
2579 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2580 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2581 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2582 IEM_MC_REF_EFLAGS(pEFlags);
2583 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u64, iemAImpl_andn_u64_fallback),
2584 pDst, uSrc1, uSrc2, pEFlags);
2585 IEM_MC_ADVANCE_RIP_AND_FINISH();
2586 IEM_MC_END();
2587 }
2588 else
2589 {
2590 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2591 IEM_MC_ARG(uint32_t *, pDst, 0);
2592 IEM_MC_ARG(uint32_t, uSrc1, 1);
2593 IEM_MC_ARG(uint32_t, uSrc2, 2);
2594 IEM_MC_ARG(uint32_t *, pEFlags, 3);
2595 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2597 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
2598 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2599 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2600 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2601 IEM_MC_REF_EFLAGS(pEFlags);
2602 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u32, iemAImpl_andn_u32_fallback),
2603 pDst, uSrc1, uSrc2, pEFlags);
2604 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2605 IEM_MC_ADVANCE_RIP_AND_FINISH();
2606 IEM_MC_END();
2607 }
2608 }
2609}
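
/*
 * For reference, andn computes (illustrative C only):
 *
 *      uDst = ~uSrc1 & uSrc2;  // uSrc1 = VEX.vvvv register, uSrc2 = r/m
 *
 * SF and ZF reflect the result while CF and OF are cleared, matching the
 * @opflmodify/@opflclear notes above; AF and PF are left undefined.
 */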
2610
2611/* Opcode VEX.66.0F38 0xf2 - invalid. */
2612/* Opcode VEX.F3.0F38 0xf2 - invalid. */
2613/* Opcode VEX.F2.0F38 0xf2 - invalid. */
2614
2615
2616/* Opcode VEX.0F38 0xf3 - invalid. */
2617/* Opcode VEX.66.0F38 0xf3 - invalid. */
2618
2619/* Opcode VEX.F3.0F38 0xf3 /0 - invalid. */
2620
2621/** Body for the vex group 17 instructions. */
2622#define IEMOP_BODY_By_Ey(a_Instr) \
2623 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2624 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF); \
2625 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2626 { \
2627 /* \
2628 * Register, register. \
2629 */ \
2630 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2631 { \
2632 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2633 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
2634 IEM_MC_ARG(uint64_t, uSrc, 2); \
2635 IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2636 IEM_MC_ARG(uint64_t *, pDst, 1); \
2637 IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2638 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2639 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
2640 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
2641 iemAImpl_ ## a_Instr ## _u64_fallback), fEFlagsIn, pDst, uSrc); \
2642 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2643 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2644 IEM_MC_END(); \
2645 } \
2646 else \
2647 { \
2648 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2649 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
2650 IEM_MC_ARG(uint32_t, uSrc, 2); \
2651 IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2652 IEM_MC_ARG(uint32_t *, pDst, 1); \
2653 IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2654 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2655 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
2656 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
2657 iemAImpl_ ## a_Instr ## _u32_fallback), fEFlagsIn, pDst, uSrc); \
2658 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2659 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2660 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2661 IEM_MC_END(); \
2662 } \
2663 } \
2664 else \
2665 { \
2666 /* \
2667 * Register, memory. \
2668 */ \
2669 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2670 { \
2671 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2674 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
2675 \
2676 IEM_MC_ARG(uint64_t, uSrc, 2); \
2677 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2678 IEM_MC_ARG(uint64_t *, pDst, 1); \
2679 IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2680 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2681 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
2682 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
2683 iemAImpl_ ## a_Instr ## _u64_fallback), fEFlagsIn, pDst, uSrc); \
2684 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2685 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2686 IEM_MC_END(); \
2687 } \
2688 else \
2689 { \
2690 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2693 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
2694 \
2695 IEM_MC_ARG(uint32_t, uSrc, 2); \
2696 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2697 IEM_MC_ARG(uint32_t *, pDst, 1); \
2698 IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2699 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2700 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
2701 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
2702 iemAImpl_ ## a_Instr ## _u32_fallback), fEFlagsIn, pDst, uSrc); \
2703 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2704 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2705 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2706 IEM_MC_END(); \
2707 } \
2708 } \
2709 (void)0
2710
2711
2712/**
2713 * @opmaps vexgrp17
2714 * @opcode /1
2715 * @opflmodify cf,pf,af,zf,sf,of
2716 * @opflclear of
2717 * @opflundef pf,af
2718 */
2719FNIEMOP_DEF_1(iemOp_VGrp17_blsr_By_Ey, uint8_t, bRm)
2720{
2721 IEMOP_MNEMONIC2(VEX_VM, BLSR, blsr, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2722 IEMOP_BODY_By_Ey(blsr);
2723}
2724
2725
2726/**
2727 * @opmaps vexgrp17
2728 * @opcode /2
2729 * @opflmodify cf,pf,af,zf,sf,of
2730 * @opflclear zf,of
2731 * @opflundef pf,af
2732 */
2733FNIEMOP_DEF_1(iemOp_VGrp17_blsmsk_By_Ey, uint8_t, bRm)
2734{
2735 IEMOP_MNEMONIC2(VEX_VM, BLSMSK, blsmsk, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2736 IEMOP_BODY_By_Ey(blsmsk);
2737}
2738
2739
2740/**
2741 * @opmaps vexgrp17
2742 * @opcode /3
2743 * @opflmodify cf,pf,af,zf,sf,of
2744 * @opflclear of
2745 * @opflundef pf,af
2746 */
2747FNIEMOP_DEF_1(iemOp_VGrp17_blsi_By_Ey, uint8_t, bRm)
2748{
2749 IEMOP_MNEMONIC2(VEX_VM, BLSI, blsi, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2750 IEMOP_BODY_By_Ey(blsi);
2751}
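
/*
 * The three group 17 operations above boil down to well-known bit tricks
 * on the r/m source (illustrative C only; the emulation goes through the
 * helpers selected by IEMOP_BODY_By_Ey):
 *
 *      blsr:   uDst = uSrc & (uSrc - 1);   // clear lowest set bit
 *      blsmsk: uDst = uSrc ^ (uSrc - 1);   // mask up to & incl. lowest set bit
 *      blsi:   uDst = uSrc & (0 - uSrc);   // isolate lowest set bit
 */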
2752
2753
2754/* Opcode VEX.F3.0F38 0xf3 /4 - invalid. */
2755/* Opcode VEX.F3.0F38 0xf3 /5 - invalid. */
2756/* Opcode VEX.F3.0F38 0xf3 /6 - invalid. */
2757/* Opcode VEX.F3.0F38 0xf3 /7 - invalid. */
2758
2759/**
2760 * Group 17 jump table for the VEX.F3 variant.
2761 */
2762IEM_STATIC const PFNIEMOPRM g_apfnVexGroup17_f3[] =
2763{
2764 /* /0 */ iemOp_InvalidWithRM,
2765 /* /1 */ iemOp_VGrp17_blsr_By_Ey,
2766 /* /2 */ iemOp_VGrp17_blsmsk_By_Ey,
2767 /* /3 */ iemOp_VGrp17_blsi_By_Ey,
2768 /* /4 */ iemOp_InvalidWithRM,
2769 /* /5 */ iemOp_InvalidWithRM,
2770 /* /6 */ iemOp_InvalidWithRM,
2771 /* /7 */ iemOp_InvalidWithRM
2772};
2773AssertCompile(RT_ELEMENTS(g_apfnVexGroup17_f3) == 8);
2774
2775/** Opcode VEX.F3.0F38 0xf3 (vex only - group 17). */
2776FNIEMOP_DEF(iemOp_VGrp17_f3)
2777{
2778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2779 return FNIEMOP_CALL_1(g_apfnVexGroup17_f3[IEM_GET_MODRM_REG_8(bRm)], bRm);
2780}
2781
2782/* Opcode VEX.F2.0F38 0xf3 - invalid (vex only - group 17). */
2783
2784
2785/* Opcode VEX.0F38 0xf4 - invalid. */
2786/* Opcode VEX.66.0F38 0xf4 - invalid. */
2787/* Opcode VEX.F3.0F38 0xf4 - invalid. */
2788/* Opcode VEX.F2.0F38 0xf4 - invalid. */
2789
2790/** Body for BZHI, BEXTR, ++; assumes VEX.L must be 0. */
2791#define IEMOP_BODY_Gy_Ey_By(a_Instr, a_fFeatureMember, a_fUndefFlags) \
2792 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2793 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fUndefFlags); \
2794 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2795 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2796 { \
2797 /* \
2798 * Register, register. \
2799 */ \
2800 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2801 { \
2802 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2803 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2804 IEM_MC_ARG(uint64_t *, pDst, 0); \
2805 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2806 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2807 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2808 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2809 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2810 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2811 IEM_MC_REF_EFLAGS(pEFlags); \
2812 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2813 iemAImpl_ ## a_Instr ## _u64_fallback), \
2814 pDst, uSrc1, uSrc2, pEFlags); \
2815 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2816 IEM_MC_END(); \
2817 } \
2818 else \
2819 { \
2820 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2821 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2822 IEM_MC_ARG(uint32_t *, pDst, 0); \
2823 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2824 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2825 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2826 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2827 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2828 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2829 IEM_MC_REF_EFLAGS(pEFlags); \
2830 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2831 iemAImpl_ ## a_Instr ## _u32_fallback), \
2832 pDst, uSrc1, uSrc2, pEFlags); \
2833 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2834 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2835 IEM_MC_END(); \
2836 } \
2837 } \
2838 else \
2839 { \
2840 /* \
2841 * Register, memory. \
2842 */ \
2843 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2844 { \
2845 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2846 IEM_MC_ARG(uint64_t *, pDst, 0); \
2847 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2848 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2849 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2850 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2851 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2852 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2853 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2854 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2855 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2856 IEM_MC_REF_EFLAGS(pEFlags); \
2857 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2858 iemAImpl_ ## a_Instr ## _u64_fallback), \
2859 pDst, uSrc1, uSrc2, pEFlags); \
2860 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2861 IEM_MC_END(); \
2862 } \
2863 else \
2864 { \
2865 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2866 IEM_MC_ARG(uint32_t *, pDst, 0); \
2867 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2868 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2869 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2871 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2872 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2873 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2874 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2875 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2876 IEM_MC_REF_EFLAGS(pEFlags); \
2877 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2878 iemAImpl_ ## a_Instr ## _u32_fallback), \
2879 pDst, uSrc1, uSrc2, pEFlags); \
2880 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2881 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2882 IEM_MC_END(); \
2883 } \
2884 } \
2885 (void)0
2886
2887/** Body for SARX, SHLX, SHRX; assumes VEX.L must be 0. */
2888#define IEMOP_BODY_Gy_Ey_By_NoEflags(a_Instr, a_fFeatureMember) \
2889 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2890 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2891 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2892 { \
2893 /* \
2894 * Register, register. \
2895 */ \
2896 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2897 { \
2898 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2899 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2900 IEM_MC_ARG(uint64_t *, pDst, 0); \
2901 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2902 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2903 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2904 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2905 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2906 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2907 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
2908 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2909 IEM_MC_END(); \
2910 } \
2911 else \
2912 { \
2913 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2914 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2915 IEM_MC_ARG(uint32_t *, pDst, 0); \
2916 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2917 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2918 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2919 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2920 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2921 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2922 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
2923 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2924 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2925 IEM_MC_END(); \
2926 } \
2927 } \
2928 else \
2929 { \
2930 /* \
2931 * Register, memory. \
2932 */ \
2933 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2934 { \
2935 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2936 IEM_MC_ARG(uint64_t *, pDst, 0); \
2937 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2938 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2940 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2941 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2942 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2943 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2944 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2945 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2946 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
2947 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2948 IEM_MC_END(); \
2949 } \
2950 else \
2951 { \
2952 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2953 IEM_MC_ARG(uint32_t *, pDst, 0); \
2954 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2955 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2958 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2959 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2960 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2961 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2962 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2963 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
2964 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2965 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2966 IEM_MC_END(); \
2967 } \
2968 } \
2969 (void)0
2970
2971/**
2972 * @opcode 0xf5
2973 * @oppfx none
2974 * @opflmodify cf,pf,af,zf,sf,of
2975 * @opflclear of
2976 * @opflundef pf,af
2977 * @note VEX only
2978 */
2979FNIEMOP_DEF(iemOp_bzhi_Gy_Ey_By)
2980{
2981 IEMOP_MNEMONIC3(VEX_RMV, BZHI, bzhi, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2982 IEMOP_BODY_Gy_Ey_By(bzhi, fBmi2, X86_EFL_AF | X86_EFL_PF);
2983}
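
/*
 * For reference, bzhi zeroes all bits at and above the index given in the
 * low byte of the By operand (illustrative C only; names made up):
 *
 *      uint8_t const iFirst = uSrc2 & 0xff;
 *      uDst = iFirst < cOpBits ? uSrc1 & (RT_BIT_64(iFirst) - 1) : uSrc1;
 *
 * CF is set when the index is out of range (iFirst >= cOpBits).
 */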
2984
2985/* Opcode VEX.66.0F38 0xf5 - invalid. */
2986
2987/** Body for PDEP and PEXT (similar to ANDN, except no EFLAGS). */
2988#define IEMOP_BODY_Gy_By_Ey_NoEflags(a_Instr, a_fFeatureMember) \
2989 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2990 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2991 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2992 { \
2993 /* \
2994 * Register, register. \
2995 */ \
2996 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2997 { \
2998 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2999 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
3000 IEM_MC_ARG(uint64_t *, pDst, 0); \
3001 IEM_MC_ARG(uint64_t, uSrc1, 1); \
3002 IEM_MC_ARG(uint64_t, uSrc2, 2); \
3003 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
3004 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3005 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
3006 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
3007 iemAImpl_ ## a_Instr ## _u64, \
3008 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
3009 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3010 IEM_MC_END(); \
3011 } \
3012 else \
3013 { \
3014 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
3015 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
3016 IEM_MC_ARG(uint32_t *, pDst, 0); \
3017 IEM_MC_ARG(uint32_t, uSrc1, 1); \
3018 IEM_MC_ARG(uint32_t, uSrc2, 2); \
3019 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
3020 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3021 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
3022 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
3023 iemAImpl_ ## a_Instr ## _u32, \
3024 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
3025 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3026 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3027 IEM_MC_END(); \
3028 } \
3029 } \
3030 else \
3031 { \
3032 /* \
3033 * Register, memory. \
3034 */ \
3035 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
3036 { \
3037 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
3038 IEM_MC_ARG(uint64_t *, pDst, 0); \
3039 IEM_MC_ARG(uint64_t, uSrc1, 1); \
3040 IEM_MC_ARG(uint64_t, uSrc2, 2); \
3041 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3043 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
3044 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3045 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
3046 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
3047 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
3048 iemAImpl_ ## a_Instr ## _u64, \
3049 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
3050 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3051 IEM_MC_END(); \
3052 } \
3053 else \
3054 { \
3055 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
3056 IEM_MC_ARG(uint32_t *, pDst, 0); \
3057 IEM_MC_ARG(uint32_t, uSrc1, 1); \
3058 IEM_MC_ARG(uint32_t, uSrc2, 2); \
3059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3061 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
3062 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3063 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
3064 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
3065 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
3066 iemAImpl_ ## a_Instr ## _u32, \
3067 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
3068 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3069 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3070 IEM_MC_END(); \
3071 } \
3072 } \
3073 (void)0
3074
3075
3076/** Opcode VEX.F3.0F38 0xf5 (vex only). */
3077FNIEMOP_DEF(iemOp_pext_Gy_By_Ey)
3078{
3079 IEMOP_MNEMONIC3(VEX_RVM, PEXT, pext, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3080 IEMOP_BODY_Gy_By_Ey_NoEflags(pext, fBmi2);
3081}
3082
3083
3084/** Opcode VEX.F2.0F38 0xf5 (vex only). */
3085FNIEMOP_DEF(iemOp_pdep_Gy_By_Ey)
3086{
3087 IEMOP_MNEMONIC3(VEX_RVM, PDEP, pdep, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3088 IEMOP_BODY_Gy_By_Ey_NoEflags(pdep, fBmi2);
3089}
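
/*
 * pdep and pext are inverse bit scatter/gather operations (illustrative C
 * only; the emulation uses the helpers selected above):
 *
 *      // pext: gather the source bits selected by the mask into the low bits.
 *      for (unsigned iBit = 0, iDst = 0; iBit < 64; iBit++)
 *          if (fMask & RT_BIT_64(iBit))
 *              uDst |= ((uSrc >> iBit) & 1) << iDst++;
 *
 * pdep runs the same loop the other way around, depositing the low source
 * bits at the positions selected by the mask.
 */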
3090
3091
3092/* Opcode VEX.0F38 0xf6 - invalid. */
3093/* Opcode VEX.66.0F38 0xf6 - invalid (legacy only). */
3094/* Opcode VEX.F3.0F38 0xf6 - invalid (legacy only). */
3095
3096
3097/**
3098 * @opcode 0xf6
3099 * @oppfx 0xf2
3100 * @opflclass unchanged
3101 */
3102FNIEMOP_DEF(iemOp_mulx_By_Gy_rDX_Ey)
3103{
3104 IEMOP_MNEMONIC4(VEX_RVM, MULX, mulx, Gy, By, Ey, rDX, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3105 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
3106 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3107 if (IEM_IS_MODRM_REG_MODE(bRm))
3108 {
3109 /*
3110 * Register, register.
3111 */
3112 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3113 {
3114 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3115 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
3116 IEM_MC_ARG(uint64_t *, pDst1, 0);
3117 IEM_MC_ARG(uint64_t *, pDst2, 1);
3118 IEM_MC_ARG(uint64_t, uSrc1, 2);
3119 IEM_MC_ARG(uint64_t, uSrc2, 3);
3120 IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
3121 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
3122 IEM_MC_REF_GREG_U64(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
3123 IEM_MC_REF_GREG_U64(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
3124 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
3125 pDst1, pDst2, uSrc1, uSrc2);
3126 IEM_MC_ADVANCE_RIP_AND_FINISH();
3127 IEM_MC_END();
3128 }
3129 else
3130 {
3131 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3132 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
3133 IEM_MC_ARG(uint32_t *, pDst1, 0);
3134 IEM_MC_ARG(uint32_t *, pDst2, 1);
3135 IEM_MC_ARG(uint32_t, uSrc1, 2);
3136 IEM_MC_ARG(uint32_t, uSrc2, 3);
3137 IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
3138 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
3139 IEM_MC_REF_GREG_U32(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
3140 IEM_MC_REF_GREG_U32(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
3141 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
3142 pDst1, pDst2, uSrc1, uSrc2);
3143 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu));
3144 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
3145 IEM_MC_ADVANCE_RIP_AND_FINISH();
3146 IEM_MC_END();
3147 }
3148 }
3149 else
3150 {
3151 /*
3152 * Register, memory.
3153 */
3154 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3155 {
3156 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
3157 IEM_MC_ARG(uint64_t *, pDst1, 0);
3158 IEM_MC_ARG(uint64_t *, pDst2, 1);
3159 IEM_MC_ARG(uint64_t, uSrc1, 2);
3160 IEM_MC_ARG(uint64_t, uSrc2, 3);
3161 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3163 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
3164 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3165 IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
3166 IEM_MC_REF_GREG_U64(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
3167 IEM_MC_REF_GREG_U64(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
3168 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
3169 pDst1, pDst2, uSrc1, uSrc2);
3170 IEM_MC_ADVANCE_RIP_AND_FINISH();
3171 IEM_MC_END();
3172 }
3173 else
3174 {
3175 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3176 IEM_MC_ARG(uint32_t *, pDst1, 0);
3177 IEM_MC_ARG(uint32_t *, pDst2, 1);
3178 IEM_MC_ARG(uint32_t, uSrc1, 2);
3179 IEM_MC_ARG(uint32_t, uSrc2, 3);
3180 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3182 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
3183 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3184 IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
3185 IEM_MC_REF_GREG_U32(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
3186 IEM_MC_REF_GREG_U32(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
3187 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
3188 pDst1, pDst2, uSrc1, uSrc2);
3189 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu));
3190 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
3191 IEM_MC_ADVANCE_RIP_AND_FINISH();
3192 IEM_MC_END();
3193 }
3194 }
3195}
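
/*
 * For reference, mulx is a flagless unsigned multiply with an implicit
 * rDX source; the 64-bit variant amounts to (illustrative C, using a
 * 128-bit intermediate):
 *
 *      uint128_t const uResult = (uint128_t)uRDX * uSrc2;
 *      *pDst2 = (uint64_t)uResult;             // VEX.vvvv gets the low half.
 *      *pDst1 = (uint64_t)(uResult >> 64);     // ModRM.reg gets the high half.
 *
 * EFLAGS are left untouched, which is the point of the instruction.
 */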


/**
 * @opcode 0xf7
 * @oppfx none
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflclear cf,of
 * @opflundef pf,af,sf
 */
FNIEMOP_DEF(iemOp_bextr_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, BEXTR, bextr, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_Ey_By(bextr, fBmi1, X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
}
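
/*
 * Reader's note (illustrative): BEXTR extracts a bit field from the first
 * source, with the start position in bits 7:0 and the field length in bits
 * 15:8 of the second source.  A rough C model of the 64-bit form
 * (BextrU64Model is a hypothetical name; the emulation proper goes through
 * IEMOP_BODY_Gy_Ey_By and the bextr assembly/fallback helpers):
 *
 *     static uint64_t BextrU64Model(uint64_t uSrc1, uint64_t uSrc2)
 *     {
 *         unsigned const iStart = (unsigned)(uSrc2 & 0xff);
 *         unsigned const cBits  = (unsigned)((uSrc2 >> 8) & 0xff);
 *         if (iStart >= 64 || cBits == 0)
 *             return 0;
 *         uint64_t const uShifted = uSrc1 >> iStart;
 *         return cBits >= 64 ? uShifted : uShifted & ((UINT64_C(1) << cBits) - 1);
 *     }
 *
 * ZF is set according to the result, CF and OF are cleared, and the flags
 * listed as undefined in the doc comment above are passed to the body macro
 * as such.
 */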


/**
 * @opcode 0xf7
 * @oppfx 0x66
 * @opflclass unchanged
 */
FNIEMOP_DEF(iemOp_shlx_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, SHLX, shlx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_Ey_By_NoEflags(shlx, fBmi2);
}


/**
 * @opcode 0xf7
 * @oppfx 0xf3
 * @opflclass unchanged
 */
FNIEMOP_DEF(iemOp_sarx_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, SARX, sarx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_Ey_By_NoEflags(sarx, fBmi2);
}


/**
 * @opcode 0xf7
 * @oppfx 0xf2
 * @opflclass unchanged
 */
FNIEMOP_DEF(iemOp_shrx_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, SHRX, shrx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_Ey_By_NoEflags(shrx, fBmi2);
}
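
/*
 * Reader's note (illustrative): SHLX, SARX and SHRX are flag-preserving
 * shifts; the count comes from the VEX.vvvv register and is masked to the
 * operand width (63 for 64-bit operands, 31 for 32-bit ones).  A compact C
 * model of the 64-bit forms, assuming the usual arithmetic behaviour of
 * signed right shifts on mainstream compilers (the *Model names are
 * hypothetical, for illustration only):
 *
 *     static uint64_t ShlxU64Model(uint64_t uSrc, uint64_t uCnt) { return uSrc << (uCnt & 63); }
 *     static uint64_t ShrxU64Model(uint64_t uSrc, uint64_t uCnt) { return uSrc >> (uCnt & 63); }
 *     static int64_t  SarxU64Model(int64_t  iSrc, uint64_t uCnt) { return iSrc >> (uCnt & 63); }
 *
 * This is why all three go through IEMOP_BODY_Gy_Ey_By_NoEflags rather than
 * the flag-updating body macro used by BEXTR.
 */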

/* Opcode VEX.0F38 0xf8 - invalid. */
/* Opcode VEX.66.0F38 0xf8 - invalid. */
/* Opcode VEX.F3.0F38 0xf8 - invalid. */
/* Opcode VEX.F2.0F38 0xf8 - invalid. */

/* Opcode VEX.0F38 0xf9 - invalid. */
/* Opcode VEX.66.0F38 0xf9 - invalid. */
/* Opcode VEX.F3.0F38 0xf9 - invalid. */
/* Opcode VEX.F2.0F38 0xf9 - invalid. */

/* Opcode VEX.0F38 0xfa - invalid. */
/* Opcode VEX.66.0F38 0xfa - invalid. */
/* Opcode VEX.F3.0F38 0xfa - invalid. */
/* Opcode VEX.F2.0F38 0xfa - invalid. */

/* Opcode VEX.0F38 0xfb - invalid. */
/* Opcode VEX.66.0F38 0xfb - invalid. */
/* Opcode VEX.F3.0F38 0xfb - invalid. */
/* Opcode VEX.F2.0F38 0xfb - invalid. */

/* Opcode VEX.0F38 0xfc - invalid. */
/* Opcode VEX.66.0F38 0xfc - invalid. */
/* Opcode VEX.F3.0F38 0xfc - invalid. */
/* Opcode VEX.F2.0F38 0xfc - invalid. */

/* Opcode VEX.0F38 0xfd - invalid. */
/* Opcode VEX.66.0F38 0xfd - invalid. */
/* Opcode VEX.F3.0F38 0xfd - invalid. */
/* Opcode VEX.F2.0F38 0xfd - invalid. */

/* Opcode VEX.0F38 0xfe - invalid. */
/* Opcode VEX.66.0F38 0xfe - invalid. */
/* Opcode VEX.F3.0F38 0xfe - invalid. */
/* Opcode VEX.F2.0F38 0xfe - invalid. */

/* Opcode VEX.0F38 0xff - invalid. */
/* Opcode VEX.66.0F38 0xff - invalid. */
/* Opcode VEX.F3.0F38 0xff - invalid. */
/* Opcode VEX.F2.0F38 0xff - invalid. */


/**
 * VEX opcode map \#2.
 *
 * @sa g_apfnThreeByte0f38
 */
const PFNIEMOP g_apfnVexMap2[] =
{
    /*          no prefix,                       066h prefix                      f3h prefix,                      f2h prefix */
    /* 0x00 */  iemOp_InvalidNeedRM,             iemOp_vpshufb_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x01 */  iemOp_InvalidNeedRM,             iemOp_vphaddw_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x02 */  iemOp_InvalidNeedRM,             iemOp_vphaddd_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x03 */  iemOp_InvalidNeedRM,             iemOp_vphaddsw_Vx_Hx_Wx,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x04 */  iemOp_InvalidNeedRM,             iemOp_vpmaddubsw_Vx_Hx_Wx,       iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x05 */  iemOp_InvalidNeedRM,             iemOp_vphsubw_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x06 */  iemOp_InvalidNeedRM,             iemOp_vphsubd_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x07 */  iemOp_InvalidNeedRM,             iemOp_vphsubsw_Vx_Hx_Wx,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x08 */  iemOp_InvalidNeedRM,             iemOp_vpsignb_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x09 */  iemOp_InvalidNeedRM,             iemOp_vpsignw_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x0a */  iemOp_InvalidNeedRM,             iemOp_vpsignd_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x0b */  iemOp_InvalidNeedRM,             iemOp_vpmulhrsw_Vx_Hx_Wx,        iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x0c */  iemOp_InvalidNeedRM,             iemOp_vpermilps_Vx_Hx_Wx,        iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x0d */  iemOp_InvalidNeedRM,             iemOp_vpermilpd_Vx_Hx_Wx,        iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x0e */  iemOp_InvalidNeedRM,             iemOp_vtestps_Vx_Wx,             iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x0f */  iemOp_InvalidNeedRM,             iemOp_vtestpd_Vx_Wx,             iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,

    /* 0x10 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x11 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x12 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x13 */  iemOp_InvalidNeedRM,             iemOp_vcvtph2ps_Vx_Wx,           iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x14 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x15 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x16 */  iemOp_InvalidNeedRM,             iemOp_vpermps_Vqq_Hqq_Wqq,       iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x17 */  iemOp_InvalidNeedRM,             iemOp_vptest_Vx_Wx,              iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x18 */  iemOp_InvalidNeedRM,             iemOp_vbroadcastss_Vx_Wd,        iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x19 */  iemOp_InvalidNeedRM,             iemOp_vbroadcastsd_Vqq_Wq,       iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x1a */  iemOp_InvalidNeedRM,             iemOp_vbroadcastf128_Vqq_Mdq,    iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x1b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */  iemOp_InvalidNeedRM,             iemOp_vpabsb_Vx_Wx,              iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x1d */  iemOp_InvalidNeedRM,             iemOp_vpabsw_Vx_Wx,              iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x1e */  iemOp_InvalidNeedRM,             iemOp_vpabsd_Vx_Wx,              iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x1f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */  iemOp_InvalidNeedRM,             iemOp_vpmovsxbw_Vx_UxMq,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x21 */  iemOp_InvalidNeedRM,             iemOp_vpmovsxbd_Vx_UxMd,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x22 */  iemOp_InvalidNeedRM,             iemOp_vpmovsxbq_Vx_UxMw,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x23 */  iemOp_InvalidNeedRM,             iemOp_vpmovsxwd_Vx_UxMq,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x24 */  iemOp_InvalidNeedRM,             iemOp_vpmovsxwq_Vx_UxMd,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x25 */  iemOp_InvalidNeedRM,             iemOp_vpmovsxdq_Vx_UxMq,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x26 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */  iemOp_InvalidNeedRM,             iemOp_vpmuldq_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x29 */  iemOp_InvalidNeedRM,             iemOp_vpcmpeqq_Vx_Hx_Wx,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x2a */  iemOp_InvalidNeedRM,             iemOp_vmovntdqa_Vx_Mx,           iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x2b */  iemOp_InvalidNeedRM,             iemOp_vpackusdw_Vx_Hx_Wx,        iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x2c */  iemOp_InvalidNeedRM,             iemOp_vmaskmovps_Vx_Hx_Mx,       iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x2d */  iemOp_InvalidNeedRM,             iemOp_vmaskmovpd_Vx_Hx_Mx,       iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x2e */  iemOp_InvalidNeedRM,             iemOp_vmaskmovps_Mx_Hx_Vx,       iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x2f */  iemOp_InvalidNeedRM,             iemOp_vmaskmovpd_Mx_Hx_Vx,       iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,

    /* 0x30 */  iemOp_InvalidNeedRM,             iemOp_vpmovzxbw_Vx_UxMq,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x31 */  iemOp_InvalidNeedRM,             iemOp_vpmovzxbd_Vx_UxMd,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x32 */  iemOp_InvalidNeedRM,             iemOp_vpmovzxbq_Vx_UxMw,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x33 */  iemOp_InvalidNeedRM,             iemOp_vpmovzxwd_Vx_UxMq,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x34 */  iemOp_InvalidNeedRM,             iemOp_vpmovzxwq_Vx_UxMd,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x35 */  iemOp_InvalidNeedRM,             iemOp_vpmovzxdq_Vx_UxMq,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x36 */  iemOp_InvalidNeedRM,             iemOp_vpermd_Vqq_Hqq_Wqq,        iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x37 */  iemOp_InvalidNeedRM,             iemOp_vpcmpgtq_Vx_Hx_Wx,         iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x38 */  iemOp_InvalidNeedRM,             iemOp_vpminsb_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x39 */  iemOp_InvalidNeedRM,             iemOp_vpminsd_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x3a */  iemOp_InvalidNeedRM,             iemOp_vpminuw_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x3b */  iemOp_InvalidNeedRM,             iemOp_vpminud_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x3c */  iemOp_InvalidNeedRM,             iemOp_vpmaxsb_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x3d */  iemOp_InvalidNeedRM,             iemOp_vpmaxsd_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x3e */  iemOp_InvalidNeedRM,             iemOp_vpmaxuw_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x3f */  iemOp_InvalidNeedRM,             iemOp_vpmaxud_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,

    /* 0x40 */  iemOp_InvalidNeedRM,             iemOp_vpmulld_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x41 */  iemOp_InvalidNeedRM,             iemOp_vphminposuw_Vdq_Wdq,       iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x42 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */  iemOp_InvalidNeedRM,             iemOp_vpsrlvd_q_Vx_Hx_Wx,        iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x46 */  iemOp_InvalidNeedRM,             iemOp_vpsravd_Vx_Hx_Wx,          iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x47 */  iemOp_InvalidNeedRM,             iemOp_vpsllvd_q_Vx_Hx_Wx,        iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x48 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x51 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x52 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x53 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x54 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x55 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x56 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x57 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x58 */  iemOp_InvalidNeedRM,             iemOp_vpbroadcastd_Vx_Wx,        iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x59 */  iemOp_InvalidNeedRM,             iemOp_vpbroadcastq_Vx_Wx,        iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x5a */  iemOp_InvalidNeedRM,             iemOp_vbroadcasti128_Vqq_Mdq,    iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x5b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x60 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x61 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x62 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x63 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x64 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x65 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x66 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x67 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x68 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x69 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x70 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x71 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x72 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x73 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x74 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x75 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x76 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x77 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x78 */  iemOp_InvalidNeedRM,             iemOp_vpbroadcastb_Vx_Wx,        iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x79 */  iemOp_InvalidNeedRM,             iemOp_vpbroadcastw_Vx_Wx,        iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x7a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x80 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */  iemOp_InvalidNeedRM,             iemOp_vpmaskmovd_q_Vx_Hx_Mx,     iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x8d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */  iemOp_InvalidNeedRM,             iemOp_vpmaskmovd_q_Mx_Vx_Hx,     iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x8f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */  iemOp_InvalidNeedRM,             iemOp_vpgatherdd_q_Vx_Hx_Wx,     iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x91 */  iemOp_InvalidNeedRM,             iemOp_vpgatherqd_q_Vx_Hx_Wx,     iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x92 */  iemOp_InvalidNeedRM,             iemOp_vgatherdps_d_Vx_Hx_Wx,     iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x93 */  iemOp_InvalidNeedRM,             iemOp_vgatherqps_d_Vx_Hx_Wx,     iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x94 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */  iemOp_InvalidNeedRM,             iemOp_vfmaddsub132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x97 */  iemOp_InvalidNeedRM,             iemOp_vfmsubadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x98 */  iemOp_InvalidNeedRM,             iemOp_vfmadd132ps_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x99 */  iemOp_InvalidNeedRM,             iemOp_vfmadd132ss_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x9a */  iemOp_InvalidNeedRM,             iemOp_vfmsub132ps_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x9b */  iemOp_InvalidNeedRM,             iemOp_vfmsub132ss_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x9c */  iemOp_InvalidNeedRM,             iemOp_vfnmadd132ps_d_Vx_Hx_Wx,   iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x9d */  iemOp_InvalidNeedRM,             iemOp_vfnmadd132ss_d_Vx_Hx_Wx,   iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x9e */  iemOp_InvalidNeedRM,             iemOp_vfnmsub132ps_d_Vx_Hx_Wx,   iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0x9f */  iemOp_InvalidNeedRM,             iemOp_vfnmsub132ss_d_Vx_Hx_Wx,   iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,

    /* 0xa0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */  iemOp_InvalidNeedRM,             iemOp_vfmaddsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xa7 */  iemOp_InvalidNeedRM,             iemOp_vfmsubadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xa8 */  iemOp_InvalidNeedRM,             iemOp_vfmadd213ps_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xa9 */  iemOp_InvalidNeedRM,             iemOp_vfmadd213ss_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xaa */  iemOp_InvalidNeedRM,             iemOp_vfmsub213ps_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xab */  iemOp_InvalidNeedRM,             iemOp_vfmsub213ss_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xac */  iemOp_InvalidNeedRM,             iemOp_vfnmadd213ps_d_Vx_Hx_Wx,   iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xad */  iemOp_InvalidNeedRM,             iemOp_vfnmadd213ss_d_Vx_Hx_Wx,   iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xae */  iemOp_InvalidNeedRM,             iemOp_vfnmsub213ps_d_Vx_Hx_Wx,   iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xaf */  iemOp_InvalidNeedRM,             iemOp_vfnmsub213ss_d_Vx_Hx_Wx,   iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,

    /* 0xb0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */  iemOp_InvalidNeedRM,             iemOp_vfmaddsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xb7 */  iemOp_InvalidNeedRM,             iemOp_vfmsubadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xb8 */  iemOp_InvalidNeedRM,             iemOp_vfmadd231ps_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xb9 */  iemOp_InvalidNeedRM,             iemOp_vfmadd231ss_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xba */  iemOp_InvalidNeedRM,             iemOp_vfmsub231ps_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xbb */  iemOp_InvalidNeedRM,             iemOp_vfmsub231ss_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xbc */  iemOp_InvalidNeedRM,             iemOp_vfnmadd231ps_d_Vx_Hx_Wx,   iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xbd */  iemOp_InvalidNeedRM,             iemOp_vfnmadd231ss_d_Vx_Hx_Wx,   iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xbe */  iemOp_InvalidNeedRM,             iemOp_vfnmsub231ps_d_Vx_Hx_Wx,   iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xbf */  iemOp_InvalidNeedRM,             iemOp_vfnmsub231ss_d_Vx_Hx_Wx,   iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,

    /* 0xc0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xda */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xdb */  iemOp_InvalidNeedRM,             iemOp_vaesimc_Vdq_Wdq,           iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xdc */  iemOp_InvalidNeedRM,             iemOp_vaesenc_Vdq_Wdq,           iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xdd */  iemOp_InvalidNeedRM,             iemOp_vaesenclast_Vdq_Wdq,       iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xde */  iemOp_InvalidNeedRM,             iemOp_vaesdec_Vdq_Wdq,           iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xdf */  iemOp_InvalidNeedRM,             iemOp_vaesdeclast_Vdq_Wdq,       iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,

    /* 0xe0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xea */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xeb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xec */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xed */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xee */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xef */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xf0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf2 */  iemOp_andn_Gy_By_Ey,             iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xf3 */  iemOp_VGrp17_f3,                 iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,
    /* 0xf4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf5 */  iemOp_bzhi_Gy_Ey_By,             iemOp_InvalidNeedRM,             iemOp_pext_Gy_By_Ey,             iemOp_pdep_Gy_By_Ey,
    /* 0xf6 */  iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,             iemOp_InvalidNeedRM,             iemOp_mulx_By_Gy_rDX_Ey,
    /* 0xf7 */  iemOp_bextr_Gy_Ey_By,            iemOp_shlx_Gy_Ey_By,             iemOp_sarx_Gy_Ey_By,             iemOp_shrx_Gy_Ey_By,
    /* 0xf8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfa */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfc */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfd */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfe */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xff */  IEMOP_X4(iemOp_InvalidNeedRM),
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap2) == 1024);
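
/*
 * Reader's note (illustrative): the table above is laid out as 256 opcodes
 * x 4 SIMD prefix variants (none, 0x66, 0xF3, 0xF2), which is exactly what
 * the AssertCompile verifies (256 * 4 = 1024).  The decoder's lookup
 * therefore amounts to something like the following sketch, where bOpcode
 * is the map-2 opcode byte and idxPrefix the 0..3 prefix index (the actual
 * dispatch lives with the VEX prefix decoding, not in this file):
 *
 *     PFNIEMOP const pfnVexMap2Op = g_apfnVexMap2[(uintptr_t)bOpcode * 4 + idxPrefix];
 */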

/** @} */