VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap2.cpp.h@105231

Last change on this file since 105231 was 104784, checked in by vboxsync, 6 months ago

VMM/IEM: Add helper to ignore the VEX.W prefix for non-64-bit code, as required by some instructions; fixes some bootsector testcases, bugref:9898

1/* $Id: IEMAllInstVexMap2.cpp.h 104784 2024-05-26 17:45:30Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstThree0f38.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 2
33 * @{
34 */
35
36/* Opcode VEX.0F38 0x00 - invalid. */
37
38
39/** Opcode VEX.66.0F38 0x00. */
40FNIEMOP_DEF(iemOp_vpshufb_Vx_Hx_Wx)
41{
42 IEMOP_MNEMONIC3(VEX_RVM, VPSHUFB, vpshufb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
43 IEMOPMEDIAOPTF3_INIT_VARS( vpshufb);
44 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
45}
46
47
48/* Opcode VEX.0F38 0x01 - invalid. */
49
50
51/** Opcode VEX.66.0F38 0x01. */
52FNIEMOP_DEF(iemOp_vphaddw_Vx_Hx_Wx)
53{
54 IEMOP_MNEMONIC3(VEX_RVM, VPHADDW, vphaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
55 IEMOPMEDIAOPTF3_INIT_VARS(vphaddw);
56 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
57}
58
59
60/* Opcode VEX.0F38 0x02 - invalid. */
61
62
63/** Opcode VEX.66.0F38 0x02. */
64FNIEMOP_DEF(iemOp_vphaddd_Vx_Hx_Wx)
65{
66 IEMOP_MNEMONIC3(VEX_RVM, VPHADDD, vphaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
67 IEMOPMEDIAOPTF3_INIT_VARS(vphaddd);
68 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
69}
70
71
72/* Opcode VEX.0F38 0x03 - invalid. */
73
74
75/** Opcode VEX.66.0F38 0x03. */
76FNIEMOP_DEF(iemOp_vphaddsw_Vx_Hx_Wx)
77{
78 IEMOP_MNEMONIC3(VEX_RVM, VPHADDSW, vphaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
79 IEMOPMEDIAOPTF3_INIT_VARS(vphaddsw);
80 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
81}
82
83
84/* Opcode VEX.0F38 0x04 - invalid. */
85
86
87/** Opcode VEX.66.0F38 0x04. */
88FNIEMOP_DEF(iemOp_vpmaddubsw_Vx_Hx_Wx)
89{
90 IEMOP_MNEMONIC3(VEX_RVM, VPMADDUBSW, vpmaddubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
91 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddubsw);
92 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
93}
94
95
96/* Opcode VEX.0F38 0x05 - invalid. */
97
98
99/** Opcode VEX.66.0F38 0x05. */
100FNIEMOP_DEF(iemOp_vphsubw_Vx_Hx_Wx)
101{
102 IEMOP_MNEMONIC3(VEX_RVM, VPHSUBW, vphsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
103 IEMOPMEDIAOPTF3_INIT_VARS(vphsubw);
104 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
105}
106
107
108/* Opcode VEX.0F38 0x06 - invalid. */
109
110
111/** Opcode VEX.66.0F38 0x06. */
112FNIEMOP_DEF(iemOp_vphsubd_Vx_Hx_Wx)
113{
114 IEMOP_MNEMONIC3(VEX_RVM, VPHSUBD, vphsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
115 IEMOPMEDIAOPTF3_INIT_VARS(vphsubd);
116 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
117}
118
119
120/* Opcode VEX.0F38 0x07 - invalid. */
121
122
123/** Opcode VEX.66.0F38 0x07. */
124FNIEMOP_DEF(iemOp_vphsubsw_Vx_Hx_Wx)
125{
126 IEMOP_MNEMONIC3(VEX_RVM, VPHSUBSW, vphsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
127 IEMOPMEDIAOPTF3_INIT_VARS(vphsubsw);
128 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
129}
130
131
132/* Opcode VEX.0F38 0x08 - invalid. */
133
134
135/** Opcode VEX.66.0F38 0x08. */
136FNIEMOP_DEF(iemOp_vpsignb_Vx_Hx_Wx)
137{
138 IEMOP_MNEMONIC3(VEX_RVM, VPSIGNB, vpsignb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
139 IEMOPMEDIAOPTF3_INIT_VARS(vpsignb);
140 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
141}
142
143
144/* Opcode VEX.0F38 0x09 - invalid. */
145
146
147/** Opcode VEX.66.0F38 0x09. */
148FNIEMOP_DEF(iemOp_vpsignw_Vx_Hx_Wx)
149{
150 IEMOP_MNEMONIC3(VEX_RVM, VPSIGNW, vpsignw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
151 IEMOPMEDIAOPTF3_INIT_VARS(vpsignw);
152 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
153}
154
155
156/* Opcode VEX.0F38 0x0a - invalid. */
157
158
159/** Opcode VEX.66.0F38 0x0a. */
160FNIEMOP_DEF(iemOp_vpsignd_Vx_Hx_Wx)
161{
162 IEMOP_MNEMONIC3(VEX_RVM, VPSIGND, vpsignd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
163 IEMOPMEDIAOPTF3_INIT_VARS(vpsignd);
164 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
165}
166
167
168/* Opcode VEX.0F38 0x0b - invalid. */
169
170
171/** Opcode VEX.66.0F38 0x0b. */
172FNIEMOP_DEF(iemOp_vpmulhrsw_Vx_Hx_Wx)
173{
174 IEMOP_MNEMONIC3(VEX_RVM, VPMULHRSW, vpmulhrsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
175 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhrsw);
176 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
177}
178
179
180/* Opcode VEX.0F38 0x0c - invalid. */
181
182
183/** Opcode VEX.66.0F38 0x0c.
184 * AVX,AVX */
185FNIEMOP_DEF(iemOp_vpermilps_Vx_Hx_Wx)
186{
187 IEMOP_MNEMONIC3(VEX_RVM, VPERMILPS, vpermilps, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
188 IEMOPMEDIAOPTF3_INIT_VARS(vpermilps);
189 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
190}
191
192
193/* Opcode VEX.0F38 0x0d - invalid. */
194
195
196/** Opcode VEX.66.0F38 0x0d.
197 * AVX,AVX */
198FNIEMOP_DEF(iemOp_vpermilpd_Vx_Hx_Wx)
199{
200 IEMOP_MNEMONIC3(VEX_RVM, VPERMILPD, vpermilpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
201 IEMOPMEDIAOPTF3_INIT_VARS(vpermilpd);
202 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
203}
204
205
206/**
207 * Common worker for AVX instructions on the forms:
208 * - vtestps/d xmm1, xmm2/mem128
209 * - vtestps/d ymm1, ymm2/mem256
210 *
211 * Takes function table for function w/o implicit state parameter.
212 *
213 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
214 */
215#define IEMOP_BODY_VTESTP_S_D(a_Instr) \
216 Assert(pVCpu->iem.s.uVexLength <= 1); \
217 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
218 if (IEM_IS_MODRM_REG_MODE(bRm)) \
219 { \
220 /* \
221 * Register, register. \
222 */ \
223 if (pVCpu->iem.s.uVexLength) \
224 { \
225 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
226 IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
227 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
228 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
229 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0); \
230 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1); \
231 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
232 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
233 IEM_MC_PREPARE_AVX_USAGE(); \
234 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
235 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
236 IEM_MC_REF_EFLAGS(pEFlags); \
237 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u256, \
238 iemAImpl_ ## a_Instr ## _u256_fallback), \
239 puSrc1, puSrc2, pEFlags); \
240 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
241 IEM_MC_END(); \
242 } \
243 else \
244 { \
245 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
246 IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
247 IEM_MC_ARG(PCRTUINT128U, puSrc1, 0); \
248 IEM_MC_ARG(PCRTUINT128U, puSrc2, 1); \
249 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
250 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
251 IEM_MC_PREPARE_AVX_USAGE(); \
252 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
253 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
254 IEM_MC_REF_EFLAGS(pEFlags); \
255 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u128, \
256 iemAImpl_ ## a_Instr ## _u128_fallback), \
257 puSrc1, puSrc2, pEFlags); \
258 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
259 IEM_MC_END(); \
260 } \
261 } \
262 else \
263 { \
264 /* \
265 * Register, memory. \
266 */ \
267 if (pVCpu->iem.s.uVexLength) \
268 { \
269 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
270 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
271 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
273 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0); \
274 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1); \
275 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
277 IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
278 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
279 IEM_MC_PREPARE_AVX_USAGE(); \
280 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
281 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
282 IEM_MC_REF_EFLAGS(pEFlags); \
283 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u256, \
284 iemAImpl_ ## a_Instr ## _u256_fallback), \
285 puSrc1, puSrc2, pEFlags); \
286 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
287 IEM_MC_END(); \
288 } \
289 else \
290 { \
291 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
292 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
294 IEM_MC_ARG(PCRTUINT128U, puSrc1, 0); \
295 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1); \
296 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
297 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
298 IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
299 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
300 IEM_MC_PREPARE_AVX_USAGE(); \
301 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
302 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
303 IEM_MC_REF_EFLAGS(pEFlags); \
304 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u128, \
305 iemAImpl_ ## a_Instr ## _u128_fallback), \
306 puSrc1, puSrc2, pEFlags); \
307 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
308 IEM_MC_END(); \
309 } \
310 } \
311 (void)0
312
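/* Note: this body is instantiated by iemOp_vtestps_Vx_Wx and iemOp_vtestpd_Vx_Wx
   below; a_Instr is token-pasted to select the iemAImpl_*_u128/_u256 worker or
   its C fallback via IEM_SELECT_HOST_OR_FALLBACK. */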
313
314/* Opcode VEX.0F38 0x0e - invalid. */
315
316
317/**
318 * @opcode 0x0e
319 * @oppfx 0x66
320 * @opflmodify cf,zf,pf,af,sf,of
321 * @opflclear pf,af,sf,of
322 */
323FNIEMOP_DEF(iemOp_vtestps_Vx_Wx)
324{
325 /** @todo We need to check VEX.W somewhere... it is documented to \#UD on all
326 * CPU modes. */
327 IEMOP_MNEMONIC2(VEX_RM, VTESTPS, vtestps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_W_ZERO);
328 IEMOP_BODY_VTESTP_S_D(vtestps);
329}
330
331
332/* Opcode VEX.0F38 0x0f - invalid. */
333
334
335/**
336 * @opcode 0x0f
337 * @oppfx 0x66
338 * @opflmodify cf,zf,pf,af,sf,of
339 * @opflclear pf,af,sf,of
340 */
341FNIEMOP_DEF(iemOp_vtestpd_Vx_Wx)
342{
343 /** @todo We need to check VEX.W somewhere... it is documented to \#UD on all
344 * CPU modes. */
345 IEMOP_MNEMONIC2(VEX_RM, VTESTPD, vtestpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_W_ZERO);
346 IEMOP_BODY_VTESTP_S_D(vtestpd);
347}
348
349
350/* Opcode VEX.0F38 0x10 - invalid */
351/* Opcode VEX.66.0F38 0x10 - invalid (legacy only). */
352/* Opcode VEX.0F38 0x11 - invalid */
353/* Opcode VEX.66.0F38 0x11 - invalid */
354/* Opcode VEX.0F38 0x12 - invalid */
355/* Opcode VEX.66.0F38 0x12 - invalid */
356/* Opcode VEX.0F38 0x13 - invalid */
357/* Opcode VEX.66.0F38 0x13 (vex only). */
358FNIEMOP_STUB(iemOp_vcvtph2ps_Vx_Wx);
359/* Opcode VEX.0F38 0x14 - invalid */
360/* Opcode VEX.66.0F38 0x14 - invalid (legacy only). */
361/* Opcode VEX.0F38 0x15 - invalid */
362/* Opcode VEX.66.0F38 0x15 - invalid (legacy only). */
363/* Opcode VEX.0F38 0x16 - invalid */
364/** Opcode VEX.66.0F38 0x16. */
365FNIEMOP_STUB(iemOp_vpermps_Vqq_Hqq_Wqq);
366/* Opcode VEX.0F38 0x17 - invalid */
367
368
369/**
370 * @opcode 0x17
371 * @oppfx 0x66
372 * @opflmodify cf,pf,af,zf,sf,of
373 * @opflclear pf,af,sf,of
374 */
375FNIEMOP_DEF(iemOp_vptest_Vx_Wx)
376{
377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
378 if (IEM_IS_MODRM_REG_MODE(bRm))
379 {
380 /*
381 * Register, register.
382 */
383 if (pVCpu->iem.s.uVexLength)
384 {
385 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
386 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
387 IEM_MC_LOCAL(RTUINT256U, uSrc1);
388 IEM_MC_LOCAL(RTUINT256U, uSrc2);
389 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0);
390 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1);
391 IEM_MC_ARG(uint32_t *, pEFlags, 2);
392 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
393 IEM_MC_PREPARE_AVX_USAGE();
394 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
395 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
396 IEM_MC_REF_EFLAGS(pEFlags);
397 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback),
398 puSrc1, puSrc2, pEFlags);
399 IEM_MC_ADVANCE_RIP_AND_FINISH();
400 IEM_MC_END();
401 }
402 else
403 {
404 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
405 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
406 IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
407 IEM_MC_ARG(PCRTUINT128U, puSrc2, 1);
408 IEM_MC_ARG(uint32_t *, pEFlags, 2);
409 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
410 IEM_MC_PREPARE_AVX_USAGE();
411 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
412 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
413 IEM_MC_REF_EFLAGS(pEFlags);
414 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
415 IEM_MC_ADVANCE_RIP_AND_FINISH();
416 IEM_MC_END();
417 }
418 }
419 else
420 {
421 /*
422 * Register, memory.
423 */
424 if (pVCpu->iem.s.uVexLength)
425 {
426 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
427 IEM_MC_LOCAL(RTUINT256U, uSrc1);
428 IEM_MC_LOCAL(RTUINT256U, uSrc2);
429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
430 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0);
431 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1);
432 IEM_MC_ARG(uint32_t *, pEFlags, 2);
433
434 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
435 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
436 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
437 IEM_MC_PREPARE_AVX_USAGE();
438
439 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
440 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
441 IEM_MC_REF_EFLAGS(pEFlags);
442 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback),
443 puSrc1, puSrc2, pEFlags);
444
445 IEM_MC_ADVANCE_RIP_AND_FINISH();
446 IEM_MC_END();
447 }
448 else
449 {
450 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
451 IEM_MC_LOCAL(RTUINT128U, uSrc2);
452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
453 IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
454 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1);
455 IEM_MC_ARG(uint32_t *, pEFlags, 2);
456
457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
458 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
459 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
460 IEM_MC_PREPARE_AVX_USAGE();
461
462 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
463 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
464 IEM_MC_REF_EFLAGS(pEFlags);
465 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
466
467 IEM_MC_ADVANCE_RIP_AND_FINISH();
468 IEM_MC_END();
469 }
470 }
471}
472
473
474/* Opcode VEX.0F38 0x18 - invalid */
475
476
477/** Opcode VEX.66.0F38 0x18. */
478FNIEMOP_DEF(iemOp_vbroadcastss_Vx_Wd)
479{
480 IEMOP_MNEMONIC2(VEX_RM, VBROADCASTSS, vbroadcastss, Vx, Wx, DISOPTYPE_HARMLESS, 0);
481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
482 if (IEM_IS_MODRM_REG_MODE(bRm))
483 {
484 /*
485 * Register, register.
486 */
487 if (pVCpu->iem.s.uVexLength)
488 {
489 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
490 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
491 IEM_MC_LOCAL(uint32_t, uSrc);
492
493 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
494 IEM_MC_PREPARE_AVX_USAGE();
495
496 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
497 IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
498
499 IEM_MC_ADVANCE_RIP_AND_FINISH();
500 IEM_MC_END();
501 }
502 else
503 {
504 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
505 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
506 IEM_MC_LOCAL(uint32_t, uSrc);
507
508 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
509 IEM_MC_PREPARE_AVX_USAGE();
510 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
511 IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
512
513 IEM_MC_ADVANCE_RIP_AND_FINISH();
514 IEM_MC_END();
515 }
516 }
517 else
518 {
519 /*
520 * Register, memory.
521 */
522 if (pVCpu->iem.s.uVexLength)
523 {
524 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
525 IEM_MC_LOCAL(uint32_t, uSrc);
526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
527
528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
529 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
530 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
531 IEM_MC_PREPARE_AVX_USAGE();
532
533 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
534 IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
535
536 IEM_MC_ADVANCE_RIP_AND_FINISH();
537 IEM_MC_END();
538 }
539 else
540 {
541 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
542 IEM_MC_LOCAL(uint32_t, uSrc);
543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
544
545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
546 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
547 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
548 IEM_MC_PREPARE_AVX_USAGE();
549
550 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
551 IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
552
553 IEM_MC_ADVANCE_RIP_AND_FINISH();
554 IEM_MC_END();
555 }
556 }
557}
558
559
560/* Opcode VEX.0F38 0x19 - invalid */
561
562
563/** Opcode VEX.66.0F38 0x19. */
564FNIEMOP_DEF(iemOp_vbroadcastsd_Vqq_Wq)
565{
566 IEMOP_MNEMONIC2(VEX_RM, VBROADCASTSD, vbroadcastsd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
567 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
568 if (IEM_IS_MODRM_REG_MODE(bRm))
569 {
570 /*
571 * Register, register.
572 */
573 if (pVCpu->iem.s.uVexLength)
574 {
575 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
576 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
577 IEM_MC_LOCAL(uint64_t, uSrc);
578
579 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
580 IEM_MC_PREPARE_AVX_USAGE();
581
582 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
583 IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
584
585 IEM_MC_ADVANCE_RIP_AND_FINISH();
586 IEM_MC_END();
587 }
588 else
589 {
590 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
591 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
592 IEM_MC_LOCAL(uint64_t, uSrc);
593
594 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
595 IEM_MC_PREPARE_AVX_USAGE();
596 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
597 IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
598
599 IEM_MC_ADVANCE_RIP_AND_FINISH();
600 IEM_MC_END();
601 }
602 }
603 else
604 {
605 /*
606 * Register, memory.
607 */
608 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
609 IEM_MC_LOCAL(uint64_t, uSrc);
610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
611
612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
613 IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
614 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
615 IEM_MC_PREPARE_AVX_USAGE();
616
617 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
618 IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
619
620 IEM_MC_ADVANCE_RIP_AND_FINISH();
621 IEM_MC_END();
622 }
623}
624
625
626/* Opcode VEX.0F38 0x1a - invalid */
627
628
629/** Opcode VEX.66.0F38 0x1a. */
630FNIEMOP_DEF(iemOp_vbroadcastf128_Vqq_Mdq)
631{
632 IEMOP_MNEMONIC2(VEX_RM, VBROADCASTF128, vbroadcastf128, Vx, Wx, DISOPTYPE_HARMLESS, 0);
633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
634 if (IEM_IS_MODRM_REG_MODE(bRm))
635 {
636 /*
637 * No register, register.
638 */
639 IEMOP_RAISE_INVALID_OPCODE_RET();
640 }
641 else
642 {
643 /*
644 * Register, memory.
645 */
646 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
647 IEM_MC_LOCAL(RTUINT128U, uSrc);
648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
649
650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
651 IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
652 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
653 IEM_MC_PREPARE_AVX_USAGE();
654
655 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
656 IEM_MC_BROADCAST_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
657
658 IEM_MC_ADVANCE_RIP_AND_FINISH();
659 IEM_MC_END();
660 }
661}
662
663
664/* Opcode VEX.0F38 0x1b - invalid */
665/* Opcode VEX.66.0F38 0x1b - invalid */
666/* Opcode VEX.0F38 0x1c - invalid. */
667
668
669/** Opcode VEX.66.0F38 0x1c. */
670FNIEMOP_DEF(iemOp_vpabsb_Vx_Wx)
671{
672 IEMOP_MNEMONIC2(VEX_RM, VPABSB, vpabsb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
673 IEMOPMEDIAOPTF2_INIT_VARS(vpabsb);
674 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
675}
676
677
678/* Opcode VEX.0F38 0x1d - invalid. */
679
680
681/** Opcode VEX.66.0F38 0x1d. */
682FNIEMOP_DEF(iemOp_vpabsw_Vx_Wx)
683{
684 IEMOP_MNEMONIC2(VEX_RM, VPABSW, vpabsw, Vx, Wx, DISOPTYPE_HARMLESS, 0);
685 IEMOPMEDIAOPTF2_INIT_VARS(vpabsw);
686 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
687}
688
689/* Opcode VEX.0F38 0x1e - invalid. */
690
691
692/** Opcode VEX.66.0F38 0x1e. */
693FNIEMOP_DEF(iemOp_vpabsd_Vx_Wx)
694{
695 IEMOP_MNEMONIC2(VEX_RM, VPABSD, vpabsd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
696 IEMOPMEDIAOPTF2_INIT_VARS(vpabsd);
697 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
698}
699
700
701/* Opcode VEX.0F38 0x1f - invalid */
702/* Opcode VEX.66.0F38 0x1f - invalid */
703
704
705/** Body for the vpmov{s,z}x* instructions. */
706#define IEMOP_BODY_VPMOV_S_Z(a_Instr, a_SrcWidth, a_VexLengthMemFetch) \
707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
708 if (IEM_IS_MODRM_REG_MODE(bRm)) \
709 { \
710 /* \
711 * Register, register. \
712 */ \
713 if (pVCpu->iem.s.uVexLength) \
714 { \
715 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
716 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); \
717 IEM_MC_LOCAL(RTUINT256U, uDst); \
718 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
719 IEM_MC_ARG(PCRTUINT128U, puSrc, 1); \
720 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
721 IEM_MC_PREPARE_AVX_USAGE(); \
722 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
723 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
724 iemAImpl_ ## a_Instr ## _u256_fallback), \
725 puDst, puSrc); \
726 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
727 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
728 IEM_MC_END(); \
729 } \
730 else \
731 { \
732 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
733 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); \
734 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
735 IEM_MC_ARG(uint ## a_SrcWidth ##_t, uSrc, 1); \
736 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
737 IEM_MC_PREPARE_AVX_USAGE(); \
738 IEM_MC_FETCH_XREG_U ## a_SrcWidth (uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0); \
739 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
740 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
741 iemAImpl_## a_Instr ## _u128_fallback), \
742 puDst, uSrc); \
743 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
744 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
745 IEM_MC_END(); \
746 } \
747 } \
748 else \
749 { \
750 /* \
751 * Register, memory. \
752 */ \
753 if (pVCpu->iem.s.uVexLength) \
754 { \
755 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
756 IEM_MC_LOCAL(RTUINT256U, uDst); \
757 IEM_MC_LOCAL(RTUINT128U, uSrc); \
758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
759 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
760 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1); \
761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
762 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); \
763 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
764 IEM_MC_PREPARE_AVX_USAGE(); \
765 a_VexLengthMemFetch(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
766 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
767 iemAImpl_ ## a_Instr ## _u256_fallback), \
768 puDst, puSrc); \
769 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
770 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
771 IEM_MC_END(); \
772 } \
773 else \
774 { \
775 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
777 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
778 IEM_MC_ARG(uint ## a_SrcWidth ##_t, uSrc, 1); \
779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
780 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); \
781 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
782 IEM_MC_PREPARE_AVX_USAGE(); \
783 IEM_MC_FETCH_MEM_U ## a_SrcWidth (uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
784 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
785 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
786 iemAImpl_ ## a_Instr ## _u128_fallback), \
787 puDst, uSrc); \
788 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
789 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
790 IEM_MC_END(); \
791 } \
792 } \
793 (void)0
794
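/* Notes on the parameters: a_SrcWidth is the width in bits of the source fetched
   for the 128-bit form (see the IEM_MC_FETCH_XREG_U<width> and
   IEM_MC_FETCH_MEM_U<width> invocations above), while a_VexLengthMemFetch names
   the MC used to fetch the full 128-bit memory source for the 256-bit form. */
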
795/** Opcode VEX.66.0F38 0x20. */
796FNIEMOP_DEF(iemOp_vpmovsxbw_Vx_UxMq)
797{
798 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
799 IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBW, vpmovsxbw, Vx, Wq, DISOPTYPE_HARMLESS, 0);
800 IEMOP_BODY_VPMOV_S_Z(vpmovsxbw, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
801}
802
803
804/** Opcode VEX.66.0F38 0x21. */
805FNIEMOP_DEF(iemOp_vpmovsxbd_Vx_UxMd)
806{
807 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
808 IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBD, vpmovsxbd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
809 IEMOP_BODY_VPMOV_S_Z(vpmovsxbd, 32, IEM_MC_FETCH_MEM_U128);
810}
811
812
813/** Opcode VEX.66.0F38 0x22. */
814FNIEMOP_DEF(iemOp_vpmovsxbq_Vx_UxMw)
815{
816 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
817 IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBQ, vpmovsxbq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
818 IEMOP_BODY_VPMOV_S_Z(vpmovsxbq, 16, IEM_MC_FETCH_MEM_U128);
819}
820
821
822/** Opcode VEX.66.0F38 0x23. */
823FNIEMOP_DEF(iemOp_vpmovsxwd_Vx_UxMq)
824{
825 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
826 IEMOP_MNEMONIC2(VEX_RM, VPMOVSXWD, vpmovsxwd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
827 IEMOP_BODY_VPMOV_S_Z(vpmovsxwd, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
828}
829
830
831/** Opcode VEX.66.0F38 0x24. */
832FNIEMOP_DEF(iemOp_vpmovsxwq_Vx_UxMd)
833{
834 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
835 IEMOP_MNEMONIC2(VEX_RM, VPMOVSXWQ, vpmovsxwq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
836 IEMOP_BODY_VPMOV_S_Z(vpmovsxwq, 32, IEM_MC_FETCH_MEM_U128);
837}
838
839
840/** Opcode VEX.66.0F38 0x25. */
841FNIEMOP_DEF(iemOp_vpmovsxdq_Vx_UxMq)
842{
843 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
844 IEMOP_MNEMONIC2(VEX_RM, VPMOVSXDQ, vpmovsxdq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
845 IEMOP_BODY_VPMOV_S_Z(vpmovsxdq, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
846}
847
848
849/* Opcode VEX.66.0F38 0x26 - invalid */
850/* Opcode VEX.66.0F38 0x27 - invalid */
851
852
853/** Opcode VEX.66.0F38 0x28. */
854FNIEMOP_DEF(iemOp_vpmuldq_Vx_Hx_Wx)
855{
856 IEMOP_MNEMONIC3(VEX_RVM, VPMULDQ, vpmuldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
857 IEMOPMEDIAOPTF3_INIT_VARS(vpmuldq);
858 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
859}
860
861
862/** Opcode VEX.66.0F38 0x29. */
863FNIEMOP_DEF(iemOp_vpcmpeqq_Vx_Hx_Wx)
864{
865 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQQ, vpcmpeqq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
866 IEMOPMEDIAOPTF3_INIT_VARS(vpcmpeqq);
867 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
868}
869
870
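/* Opcode VEX.66.0F38 0x2a - see the @opcode annotations inside for the two VEX.L forms. */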
871FNIEMOP_DEF(iemOp_vmovntdqa_Vx_Mx)
872{
873 Assert(pVCpu->iem.s.uVexLength <= 1);
874 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
875 if (IEM_IS_MODRM_MEM_MODE(bRm))
876 {
877 if (pVCpu->iem.s.uVexLength == 0)
878 {
879 /**
880 * @opcode 0x2a
881 * @opcodesub !11 mr/reg vex.l=0
882 * @oppfx 0x66
883 * @opcpuid avx
884 * @opgroup og_avx_cachect
885 * @opxcpttype 1
886 * @optest op1=-1 op2=2 -> op1=2
887 * @optest op1=0 op2=-42 -> op1=-42
888 */
889 /* 128-bit: Memory, register. */
890 IEMOP_MNEMONIC2EX(vmovntdqa_Vdq_WO_Mdq_L0, "vmovntdqa, Vdq_WO, Mdq", VEX_RM_MEM, VMOVNTDQA, vmovntdqa, Vx_WO, Mx,
891 DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
892 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
893 IEM_MC_LOCAL(RTUINT128U, uSrc);
894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
895
896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
897 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
898 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
899 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
900
901 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
902 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
903
904 IEM_MC_ADVANCE_RIP_AND_FINISH();
905 IEM_MC_END();
906 }
907 else
908 {
909 /**
910 * @opdone
911 * @opcode 0x2a
912 * @opcodesub !11 mr/reg vex.l=1
913 * @oppfx 0x66
914 * @opcpuid avx2
915 * @opgroup og_avx2_cachect
916 * @opxcpttype 1
917 * @optest op1=-1 op2=2 -> op1=2
918 * @optest op1=0 op2=-42 -> op1=-42
919 */
920 /* 256-bit: Memory, register. */
921 IEMOP_MNEMONIC2EX(vmovntdqa_Vqq_WO_Mqq_L1, "vmovntdqa, Vqq_WO,Mqq", VEX_RM_MEM, VMOVNTDQA, vmovntdqa, Vx_WO, Mx,
922 DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
923 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
924 IEM_MC_LOCAL(RTUINT256U, uSrc);
925 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
926
927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
928 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
929 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
930 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
931
932 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
933 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
934
935 IEM_MC_ADVANCE_RIP_AND_FINISH();
936 IEM_MC_END();
937 }
938 }
939
940 /**
941 * @opdone
942 * @opmnemonic udvex660f382arg
943 * @opcode 0x2a
944 * @opcodesub 11 mr/reg
945 * @oppfx 0x66
946 * @opunused immediate
947 * @opcpuid avx
948 * @optest ->
949 */
950 else
951 IEMOP_RAISE_INVALID_OPCODE_RET();
952}
953
954
955/** Opcode VEX.66.0F38 0x2b. */
956FNIEMOP_DEF(iemOp_vpackusdw_Vx_Hx_Wx)
957{
958 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSDW, vpackusdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
959 IEMOPMEDIAOPTF3_INIT_VARS( vpackusdw);
960 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
961}
962
963
964/** Opcode VEX.66.0F38 0x2c. */
965FNIEMOP_DEF(iemOp_vmaskmovps_Vx_Hx_Mx)
966{
967 // IEMOP_MNEMONIC3(RM, VMASKMOVPS, vmaskmovps, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
968 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
969 if (!IEM_IS_MODRM_REG_MODE(bRm))
970 {
971 if (pVCpu->iem.s.uVexLength)
972 {
973 /*
974 * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
975 */
976 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
977 IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
978 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
979 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
981 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
982 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
983
984 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
985 IEM_MC_PREPARE_AVX_USAGE();
986
987 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);
988
989 IEM_MC_END();
990 }
991 else
992 {
993 /*
994 * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
995 */
996 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
997 IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
998 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
999 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
1000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1001 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
1002 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1003
1004 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1005 IEM_MC_PREPARE_AVX_USAGE();
1006
1007 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);
1008
1009 IEM_MC_END();
1010 }
1011 }
1012 else
1013 {
1014 /* The register, register encoding is invalid. */
1015 IEMOP_RAISE_INVALID_OPCODE_RET();
1016 }
1017}
1018
1019
1020/** Opcode VEX.66.0F38 0x2d. */
1021FNIEMOP_DEF(iemOp_vmaskmovpd_Vx_Hx_Mx)
1022{
1023 // IEMOP_MNEMONIC3(RM, VMASKMOVPD, vmaskmovpd, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
1024 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1025 if (!IEM_IS_MODRM_REG_MODE(bRm))
1026 {
1027 if (pVCpu->iem.s.uVexLength)
1028 {
1029 /*
1030 * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
1031 */
1032 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1033 IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
1034 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
1035 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
1036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1037 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
1038 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1039
1040 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1041 IEM_MC_PREPARE_AVX_USAGE();
1042
1043 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);
1044
1045 IEM_MC_END();
1046 }
1047 else
1048 {
1049 /*
1050 * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
1051 */
1052 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1053 IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
1054 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
1055 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
1056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1057 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
1058 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1059
1060 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1061 IEM_MC_PREPARE_AVX_USAGE();
1062
1063 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);
1064
1065 IEM_MC_END();
1066 }
1067 }
1068 else
1069 {
1070 /* The register, register encoding is invalid. */
1071 IEMOP_RAISE_INVALID_OPCODE_RET();
1072 }
1073}
1074
1075
1076/** Opcode VEX.66.0F38 0x2e. */
1077FNIEMOP_DEF(iemOp_vmaskmovps_Mx_Hx_Vx)
1078{
1079 // IEMOP_MNEMONIC3(RM, VMASKMOVPS, vmaskmovps, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
1080 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1081 if (!IEM_IS_MODRM_REG_MODE(bRm))
1082 {
1083 if (pVCpu->iem.s.uVexLength)
1084 {
1085 /*
1086 * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
1087 */
1088 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1089
1090 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1092 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1093 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1094 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
1095 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
1096
1097 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1098 IEM_MC_PREPARE_AVX_USAGE();
1099
1100 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);
1101
1102 IEM_MC_END();
1103 }
1104 else
1105 {
1106 /*
1107 * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
1108 */
1109 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1110
1111 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1112 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1113 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1114 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1115 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
1116 IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
1117
1118 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1119 IEM_MC_PREPARE_AVX_USAGE();
1120
1121 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);
1122
1123 IEM_MC_END();
1124 }
1125 }
1126 else
1127 {
1128 /* The register, register encoding is invalid. */
1129 IEMOP_RAISE_INVALID_OPCODE_RET();
1130 }
1131}
1132
1133
1134/** Opcode VEX.66.0F38 0x2f. */
1135FNIEMOP_DEF(iemOp_vmaskmovpd_Mx_Hx_Vx)
1136{
1137 // IEMOP_MNEMONIC3(RM, VMASKMOVPD, vmaskmovpd, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
1138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1139 if (!IEM_IS_MODRM_REG_MODE(bRm))
1140 {
1141 if (pVCpu->iem.s.uVexLength)
1142 {
1143 /*
1144 * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
1145 */
1146 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1147
1148 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1150 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1151 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1152 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
1153 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
1154
1155 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1156 IEM_MC_PREPARE_AVX_USAGE();
1157
1158 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);
1159
1160 IEM_MC_END();
1161 }
1162 else
1163 {
1164 /*
1165 * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
1166 */
1167 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1168
1169 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1170 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1171 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1172 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1173 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
1174 IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
1175
1176 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1177 IEM_MC_PREPARE_AVX_USAGE();
1178
1179 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);
1180
1181 IEM_MC_END();
1182 }
1183 }
1184 else
1185 {
1186 /* The register, register encoding is invalid. */
1187 IEMOP_RAISE_INVALID_OPCODE_RET();
1188 }
1189}
1190
1191
1192/** Opcode VEX.66.0F38 0x30. */
1193FNIEMOP_DEF(iemOp_vpmovzxbw_Vx_UxMq)
1194{
1195 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
1196 IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBW, vpmovzxbw, Vx, Wq, DISOPTYPE_HARMLESS, 0);
1197 IEMOP_BODY_VPMOV_S_Z(vpmovzxbw, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
1198}
1199
1200
1201/** Opcode VEX.66.0F38 0x31. */
1202FNIEMOP_DEF(iemOp_vpmovzxbd_Vx_UxMd)
1203{
1204 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
1205 IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBD, vpmovzxbd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
1206 IEMOP_BODY_VPMOV_S_Z(vpmovzxbd, 32, IEM_MC_FETCH_MEM_U128);
1207}
1208
1209
1210/** Opcode VEX.66.0F38 0x32. */
1211FNIEMOP_DEF(iemOp_vpmovzxbq_Vx_UxMw)
1212{
1213 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
1214 IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBQ, vpmovzxbq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
1215 IEMOP_BODY_VPMOV_S_Z(vpmovzxbq, 16, IEM_MC_FETCH_MEM_U128);
1216}
1217
1218
1219/** Opcode VEX.66.0F38 0x33. */
1220FNIEMOP_DEF(iemOp_vpmovzxwd_Vx_UxMq)
1221{
1222 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
1223 IEMOP_MNEMONIC2(VEX_RM, VPMOVZXWD, vpmovzxwd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
1224 IEMOP_BODY_VPMOV_S_Z(vpmovzxwd, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
1225}
1226
1227
1228/** Opcode VEX.66.0F38 0x34. */
1229FNIEMOP_DEF(iemOp_vpmovzxwq_Vx_UxMd)
1230{
1231 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
1232 IEMOP_MNEMONIC2(VEX_RM, VPMOVZXWQ, vpmovzxwq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
1233 IEMOP_BODY_VPMOV_S_Z(vpmovzxwq, 32, IEM_MC_FETCH_MEM_U128);
1234}
1235
1236
1237/** Opcode VEX.66.0F38 0x35. */
1238FNIEMOP_DEF(iemOp_vpmovzxdq_Vx_UxMq)
1239{
1240 /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
1241 IEMOP_MNEMONIC2(VEX_RM, VPMOVZXDQ, vpmovzxdq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
1242 IEMOP_BODY_VPMOV_S_Z(vpmovzxdq, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
1243}
1244
1245
1246/* Opcode VEX.66.0F38 0x36. */
1247FNIEMOP_STUB(iemOp_vpermd_Vqq_Hqq_Wqq);
1248
1249
1250/** Opcode VEX.66.0F38 0x37. */
1251FNIEMOP_DEF(iemOp_vpcmpgtq_Vx_Hx_Wx)
1252{
1253 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTQ, vpcmpgtq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1254 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtq);
1255 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1256}
1257
1258
1259/** Opcode VEX.66.0F38 0x38. */
1260FNIEMOP_DEF(iemOp_vpminsb_Vx_Hx_Wx)
1261{
1262 IEMOP_MNEMONIC3(VEX_RVM, VPMINSB, vpminsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1263 IEMOPMEDIAOPTF3_INIT_VARS( vpminsb);
1264 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1265}
1266
1267
1268/** Opcode VEX.66.0F38 0x39. */
1269FNIEMOP_DEF(iemOp_vpminsd_Vx_Hx_Wx)
1270{
1271 IEMOP_MNEMONIC3(VEX_RVM, VPMINSD, vpminsd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1272 IEMOPMEDIAOPTF3_INIT_VARS( vpminsd);
1273 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1274}
1275
1276
1277/** Opcode VEX.66.0F38 0x3a. */
1278FNIEMOP_DEF(iemOp_vpminuw_Vx_Hx_Wx)
1279{
1280 IEMOP_MNEMONIC3(VEX_RVM, VPMINUW, vpminuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1281 IEMOPMEDIAOPTF3_INIT_VARS( vpminuw);
1282 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1283}
1284
1285
1286/** Opcode VEX.66.0F38 0x3b. */
1287FNIEMOP_DEF(iemOp_vpminud_Vx_Hx_Wx)
1288{
1289 IEMOP_MNEMONIC3(VEX_RVM, VPMINUD, vpminud, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1290 IEMOPMEDIAOPTF3_INIT_VARS( vpminud);
1291 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1292}
1293
1294
1295/** Opcode VEX.66.0F38 0x3c. */
1296FNIEMOP_DEF(iemOp_vpmaxsb_Vx_Hx_Wx)
1297{
1298 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSB, vpmaxsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1299 IEMOPMEDIAOPTF3_INIT_VARS( vpmaxsb);
1300 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1301}
1302
1303
1304/** Opcode VEX.66.0F38 0x3d. */
1305FNIEMOP_DEF(iemOp_vpmaxsd_Vx_Hx_Wx)
1306{
1307 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSD, vpmaxsd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1308 IEMOPMEDIAOPTF3_INIT_VARS( vpmaxsd);
1309 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1310}
1311
1312
1313/** Opcode VEX.66.0F38 0x3e. */
1314FNIEMOP_DEF(iemOp_vpmaxuw_Vx_Hx_Wx)
1315{
1316 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUW, vpmaxuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1317 IEMOPMEDIAOPTF3_INIT_VARS( vpmaxuw);
1318 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1319}
1320
1321
1322/** Opcode VEX.66.0F38 0x3f. */
1323FNIEMOP_DEF(iemOp_vpmaxud_Vx_Hx_Wx)
1324{
1325 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUD, vpmaxud, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1326 IEMOPMEDIAOPTF3_INIT_VARS( vpmaxud);
1327 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1328}
1329
1330
1331/** Opcode VEX.66.0F38 0x40. */
1332FNIEMOP_DEF(iemOp_vpmulld_Vx_Hx_Wx)
1333{
1334 IEMOP_MNEMONIC3(VEX_RVM, VPMULLD, vpmulld, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1335 IEMOPMEDIAOPTF3_INIT_VARS(vpmulld);
1336 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1337}
1338
1339
1340/** Opcode VEX.66.0F38 0x41. */
1341FNIEMOP_DEF(iemOp_vphminposuw_Vdq_Wdq)
1342{
1343 IEMOP_MNEMONIC2(VEX_RM, VPHMINPOSUW, vphminposuw, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
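    /* Only the 128-bit form exists; the L0 decoding helpers below enforce VEX.L=0. */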
1344 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1345 if (IEM_IS_MODRM_REG_MODE(bRm))
1346 {
1347 /*
1348 * Register, register.
1349 */
1350 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1351 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1352 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1353 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1354 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1355 IEM_MC_PREPARE_AVX_USAGE();
1356 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1357 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1358 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback),
1359 puDst, puSrc);
1360 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1361 IEM_MC_ADVANCE_RIP_AND_FINISH();
1362 IEM_MC_END();
1363 }
1364 else
1365 {
1366 /*
1367 * Register, memory.
1368 */
1369 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1370 IEM_MC_LOCAL(RTUINT128U, uSrc);
1371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1372 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1373 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1374
1375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1376 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1377 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1378 IEM_MC_PREPARE_AVX_USAGE();
1379
1380 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1381 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1382 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback),
1383 puDst, puSrc);
1384 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1385
1386 IEM_MC_ADVANCE_RIP_AND_FINISH();
1387 IEM_MC_END();
1388 }
1389}
1390
1391
1392/* Opcode VEX.66.0F38 0x42 - invalid. */
1393/* Opcode VEX.66.0F38 0x43 - invalid. */
1394/* Opcode VEX.66.0F38 0x44 - invalid. */
1395
1396
1397/** Opcode VEX.66.0F38 0x45. */
1398FNIEMOP_DEF(iemOp_vpsrlvd_q_Vx_Hx_Wx)
1399{
1400 IEMOP_MNEMONIC3(VEX_RVM, VPSRLVD, vpsrlvd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1401
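    /* VEX.W selects the operand width: W=0 gives the doubleword form (vpsrlvd), W=1 the quadword form (vpsrlvq). */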
1402 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1403 {
1404 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlvq);
1405 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1406 }
1407 else
1408 {
1409 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlvd);
1410 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1411 }
1412}
1413
1414
1415/** Opcode VEX.66.0F38 0x46. */
1416FNIEMOP_DEF(iemOp_vpsravd_Vx_Hx_Wx)
1417{
1418 IEMOP_MNEMONIC3(VEX_RVM, VPSRAVD, vpsravd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1419 IEMOPMEDIAOPTF3_INIT_VARS(vpsravd);
1420 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1421}
1422
1423
1424/** Opcode VEX.66.0F38 0x47. */
1425FNIEMOP_DEF(iemOp_vpsllvd_q_Vx_Hx_Wx)
1426{
1427 IEMOP_MNEMONIC3(VEX_RVM, VPSLLVD, vpsllvd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1428
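    /* As with vpsrlvd/q above, VEX.W=0 selects vpsllvd and VEX.W=1 selects vpsllvq. */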
1429 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1430 {
1431 IEMOPMEDIAOPTF3_INIT_VARS(vpsllvq);
1432 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1433 }
1434 else
1435 {
1436 IEMOPMEDIAOPTF3_INIT_VARS(vpsllvd);
1437 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1438 }
1439}
1440
1441
1442/* Opcode VEX.66.0F38 0x48 - invalid. */
1443/* Opcode VEX.66.0F38 0x49 - invalid. */
1444/* Opcode VEX.66.0F38 0x4a - invalid. */
1445/* Opcode VEX.66.0F38 0x4b - invalid. */
1446/* Opcode VEX.66.0F38 0x4c - invalid. */
1447/* Opcode VEX.66.0F38 0x4d - invalid. */
1448/* Opcode VEX.66.0F38 0x4e - invalid. */
1449/* Opcode VEX.66.0F38 0x4f - invalid. */
1450
1451/* Opcode VEX.66.0F38 0x50 - invalid. */
1452/* Opcode VEX.66.0F38 0x51 - invalid. */
1453/* Opcode VEX.66.0F38 0x52 - invalid. */
1454/* Opcode VEX.66.0F38 0x53 - invalid. */
1455/* Opcode VEX.66.0F38 0x54 - invalid. */
1456/* Opcode VEX.66.0F38 0x55 - invalid. */
1457/* Opcode VEX.66.0F38 0x56 - invalid. */
1458/* Opcode VEX.66.0F38 0x57 - invalid. */
1459
1460
1461/** Opcode VEX.66.0F38 0x58. */
1462FNIEMOP_DEF(iemOp_vpbroadcastd_Vx_Wx)
1463{
1464 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTD, vpbroadcastd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1466 if (IEM_IS_MODRM_REG_MODE(bRm))
1467 {
1468 /*
1469 * Register, register.
1470 */
1471 if (pVCpu->iem.s.uVexLength)
1472 {
1473 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1474 IEM_MC_LOCAL(uint32_t, uSrc);
1475
1476 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1477 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1478 IEM_MC_PREPARE_AVX_USAGE();
1479
1480 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1481 IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1482
1483 IEM_MC_ADVANCE_RIP_AND_FINISH();
1484 IEM_MC_END();
1485 }
1486 else
1487 {
1488 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1489 IEM_MC_LOCAL(uint32_t, uSrc);
1490
1491 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1492 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1493 IEM_MC_PREPARE_AVX_USAGE();
1494 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1495 IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1496
1497 IEM_MC_ADVANCE_RIP_AND_FINISH();
1498 IEM_MC_END();
1499 }
1500 }
1501 else
1502 {
1503 /*
1504 * Register, memory.
1505 */
1506 if (pVCpu->iem.s.uVexLength)
1507 {
1508 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1509 IEM_MC_LOCAL(uint32_t, uSrc);
1510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1511
1512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1513 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1514 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1515 IEM_MC_PREPARE_AVX_USAGE();
1516
1517 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1518 IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1519
1520 IEM_MC_ADVANCE_RIP_AND_FINISH();
1521 IEM_MC_END();
1522 }
1523 else
1524 {
1525 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1526 IEM_MC_LOCAL(uint32_t, uSrc);
1527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1528
1529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1530 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1531 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1532 IEM_MC_PREPARE_AVX_USAGE();
1533
1534 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1535 IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1536
1537 IEM_MC_ADVANCE_RIP_AND_FINISH();
1538 IEM_MC_END();
1539 }
1540 }
1541}
1542
1543
1544/** Opcode VEX.66.0F38 0x59. */
1545FNIEMOP_DEF(iemOp_vpbroadcastq_Vx_Wx)
1546{
1547 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTQ, vpbroadcastq, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1549 if (IEM_IS_MODRM_REG_MODE(bRm))
1550 {
1551 /*
1552 * Register, register.
1553 */
1554 if (pVCpu->iem.s.uVexLength)
1555 {
1556 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1557 IEM_MC_LOCAL(uint64_t, uSrc);
1558
1559 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1560 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1561 IEM_MC_PREPARE_AVX_USAGE();
1562
1563 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1564 IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1565
1566 IEM_MC_ADVANCE_RIP_AND_FINISH();
1567 IEM_MC_END();
1568 }
1569 else
1570 {
1571 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1572 IEM_MC_LOCAL(uint64_t, uSrc);
1573
1574 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1575 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1576 IEM_MC_PREPARE_AVX_USAGE();
1577 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1578 IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1579
1580 IEM_MC_ADVANCE_RIP_AND_FINISH();
1581 IEM_MC_END();
1582 }
1583 }
1584 else
1585 {
1586 /*
1587 * Register, memory.
1588 */
1589 if (pVCpu->iem.s.uVexLength)
1590 {
1591 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1592 IEM_MC_LOCAL(uint64_t, uSrc);
1593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1594
1595 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1596 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1597 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1598 IEM_MC_PREPARE_AVX_USAGE();
1599
1600 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1601 IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1602
1603 IEM_MC_ADVANCE_RIP_AND_FINISH();
1604 IEM_MC_END();
1605 }
1606 else
1607 {
1608 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1609 IEM_MC_LOCAL(uint64_t, uSrc);
1610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1611
1612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1613 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1614 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1615 IEM_MC_PREPARE_AVX_USAGE();
1616
1617 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1618 IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1619
1620 IEM_MC_ADVANCE_RIP_AND_FINISH();
1621 IEM_MC_END();
1622 }
1623 }
1624}
1625
1626
1627/** Opcode VEX.66.0F38 0x5a. */
1628FNIEMOP_DEF(iemOp_vbroadcasti128_Vqq_Mdq)
1629{
1630 IEMOP_MNEMONIC2(VEX_RM, VBROADCASTI128, vbroadcasti128, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1632 if (IEM_IS_MODRM_REG_MODE(bRm))
1633 {
1634 /*
1635 * No register, register.
1636 */
1637 IEMOP_RAISE_INVALID_OPCODE_RET();
1638 }
1639 else
1640 {
1641 /*
1642 * Register, memory.
1643 */
1644 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1645 IEM_MC_LOCAL(RTUINT128U, uSrc);
1646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1647
1648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1649 IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
1650 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1651 IEM_MC_PREPARE_AVX_USAGE();
1652
1653 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1654 IEM_MC_BROADCAST_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1655
1656 IEM_MC_ADVANCE_RIP_AND_FINISH();
1657 IEM_MC_END();
1658 }
1659}
1660
1661
1662/* Opcode VEX.66.0F38 0x5b - invalid. */
1663/* Opcode VEX.66.0F38 0x5c - invalid. */
1664/* Opcode VEX.66.0F38 0x5d - invalid. */
1665/* Opcode VEX.66.0F38 0x5e - invalid. */
1666/* Opcode VEX.66.0F38 0x5f - invalid. */
1667
1668/* Opcode VEX.66.0F38 0x60 - invalid. */
1669/* Opcode VEX.66.0F38 0x61 - invalid. */
1670/* Opcode VEX.66.0F38 0x62 - invalid. */
1671/* Opcode VEX.66.0F38 0x63 - invalid. */
1672/* Opcode VEX.66.0F38 0x64 - invalid. */
1673/* Opcode VEX.66.0F38 0x65 - invalid. */
1674/* Opcode VEX.66.0F38 0x66 - invalid. */
1675/* Opcode VEX.66.0F38 0x67 - invalid. */
1676/* Opcode VEX.66.0F38 0x68 - invalid. */
1677/* Opcode VEX.66.0F38 0x69 - invalid. */
1678/* Opcode VEX.66.0F38 0x6a - invalid. */
1679/* Opcode VEX.66.0F38 0x6b - invalid. */
1680/* Opcode VEX.66.0F38 0x6c - invalid. */
1681/* Opcode VEX.66.0F38 0x6d - invalid. */
1682/* Opcode VEX.66.0F38 0x6e - invalid. */
1683/* Opcode VEX.66.0F38 0x6f - invalid. */
1684
1685/* Opcode VEX.66.0F38 0x70 - invalid. */
1686/* Opcode VEX.66.0F38 0x71 - invalid. */
1687/* Opcode VEX.66.0F38 0x72 - invalid. */
1688/* Opcode VEX.66.0F38 0x73 - invalid. */
1689/* Opcode VEX.66.0F38 0x74 - invalid. */
1690/* Opcode VEX.66.0F38 0x75 - invalid. */
1691/* Opcode VEX.66.0F38 0x76 - invalid. */
1692/* Opcode VEX.66.0F38 0x77 - invalid. */
1693
1694
1695/** Opcode VEX.66.0F38 0x78. */
1696FNIEMOP_DEF(iemOp_vpbroadcastb_Vx_Wx)
1697{
1698 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTB, vpbroadcastb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
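    /* VPBROADCASTB replicates the lowest byte of the source into every byte of the
       destination; VEX.L selects a 128-bit or 256-bit destination (zero-extended to VLMAX). */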
1699 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1700 if (IEM_IS_MODRM_REG_MODE(bRm))
1701 {
1702 /*
1703 * Register, register.
1704 */
1705 if (pVCpu->iem.s.uVexLength)
1706 {
1707 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1708 IEM_MC_LOCAL(uint8_t, uSrc);
1709
1710 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1711 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1712 IEM_MC_PREPARE_AVX_USAGE();
1713
1714 IEM_MC_FETCH_XREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1715 IEM_MC_BROADCAST_YREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1716
1717 IEM_MC_ADVANCE_RIP_AND_FINISH();
1718 IEM_MC_END();
1719 }
1720 else
1721 {
1722 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1723 IEM_MC_LOCAL(uint8_t, uSrc);
1724
1725 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1726 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1727 IEM_MC_PREPARE_AVX_USAGE();
1728 IEM_MC_FETCH_XREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1729 IEM_MC_BROADCAST_XREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1730
1731 IEM_MC_ADVANCE_RIP_AND_FINISH();
1732 IEM_MC_END();
1733 }
1734 }
1735 else
1736 {
1737 /*
1738 * Register, memory.
1739 */
1740 if (pVCpu->iem.s.uVexLength)
1741 {
1742 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1743 IEM_MC_LOCAL(uint8_t, uSrc);
1744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1745
1746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1747 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1748 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1749 IEM_MC_PREPARE_AVX_USAGE();
1750
1751 IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1752 IEM_MC_BROADCAST_YREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1753
1754 IEM_MC_ADVANCE_RIP_AND_FINISH();
1755 IEM_MC_END();
1756 }
1757 else
1758 {
1759 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1760 IEM_MC_LOCAL(uint8_t, uSrc);
1761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1762
1763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1764 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1765 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1766 IEM_MC_PREPARE_AVX_USAGE();
1767
1768 IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1769 IEM_MC_BROADCAST_XREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1770
1771 IEM_MC_ADVANCE_RIP_AND_FINISH();
1772 IEM_MC_END();
1773 }
1774 }
1775}
1776
1777
1778/** Opcode VEX.66.0F38 0x79. */
1779FNIEMOP_DEF(iemOp_vpbroadcastw_Vx_Wx)
1780{
1781 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTW, vpbroadcastw, Vx, Wx, DISOPTYPE_HARMLESS, 0);
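    /* VPBROADCASTW works like VPBROADCASTB above, but replicates the lowest word. */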
1782 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1783 if (IEM_IS_MODRM_REG_MODE(bRm))
1784 {
1785 /*
1786 * Register, register.
1787 */
1788 if (pVCpu->iem.s.uVexLength)
1789 {
1790 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1791 IEM_MC_LOCAL(uint16_t, uSrc);
1792
1793 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1794 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1795 IEM_MC_PREPARE_AVX_USAGE();
1796
1797 IEM_MC_FETCH_XREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1798 IEM_MC_BROADCAST_YREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1799
1800 IEM_MC_ADVANCE_RIP_AND_FINISH();
1801 IEM_MC_END();
1802 }
1803 else
1804 {
1805 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1806 IEM_MC_LOCAL(uint16_t, uSrc);
1807
1808 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1809 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1810 IEM_MC_PREPARE_AVX_USAGE();
1811 IEM_MC_FETCH_XREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1812 IEM_MC_BROADCAST_XREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1813
1814 IEM_MC_ADVANCE_RIP_AND_FINISH();
1815 IEM_MC_END();
1816 }
1817 }
1818 else
1819 {
1820 /*
1821 * Register, memory.
1822 */
1823 if (pVCpu->iem.s.uVexLength)
1824 {
1825 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1826 IEM_MC_LOCAL(uint16_t, uSrc);
1827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1828
1829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1830 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1831 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1832 IEM_MC_PREPARE_AVX_USAGE();
1833
1834 IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1835 IEM_MC_BROADCAST_YREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1836
1837 IEM_MC_ADVANCE_RIP_AND_FINISH();
1838 IEM_MC_END();
1839 }
1840 else
1841 {
1842 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1843 IEM_MC_LOCAL(uint16_t, uSrc);
1844 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1845
1846 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1847 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1848 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1849 IEM_MC_PREPARE_AVX_USAGE();
1850
1851 IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1852 IEM_MC_BROADCAST_XREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1853
1854 IEM_MC_ADVANCE_RIP_AND_FINISH();
1855 IEM_MC_END();
1856 }
1857 }
1858}
1859
1860
1861/* Opcode VEX.66.0F38 0x7a - invalid. */
1862/* Opcode VEX.66.0F38 0x7b - invalid. */
1863/* Opcode VEX.66.0F38 0x7c - invalid. */
1864/* Opcode VEX.66.0F38 0x7d - invalid. */
1865/* Opcode VEX.66.0F38 0x7e - invalid. */
1866/* Opcode VEX.66.0F38 0x7f - invalid. */
1867
1868/* Opcode VEX.66.0F38 0x80 - invalid (legacy only). */
1869/* Opcode VEX.66.0F38 0x81 - invalid (legacy only). */
1870/* Opcode VEX.66.0F38 0x82 - invalid (legacy only). */
1871/* Opcode VEX.66.0F38 0x83 - invalid. */
1872/* Opcode VEX.66.0F38 0x84 - invalid. */
1873/* Opcode VEX.66.0F38 0x85 - invalid. */
1874/* Opcode VEX.66.0F38 0x86 - invalid. */
1875/* Opcode VEX.66.0F38 0x87 - invalid. */
1876/* Opcode VEX.66.0F38 0x88 - invalid. */
1877/* Opcode VEX.66.0F38 0x89 - invalid. */
1878/* Opcode VEX.66.0F38 0x8a - invalid. */
1879/* Opcode VEX.66.0F38 0x8b - invalid. */
1880
1881
1882/** Opcode VEX.66.0F38 0x8c. */
1883FNIEMOP_DEF(iemOp_vpmaskmovd_q_Vx_Hx_Mx)
1884{
1885 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
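    /* VPMASKMOVD/Q load form: each element whose mask element (in VEX.vvvv) has its MSB set
       is read from memory, the rest are zeroed; masked-off elements must not fault. */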
1886 if (!IEM_IS_MODRM_REG_MODE(bRm))
1887 {
1888 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1889 {
1890 // IEMOP_MNEMONIC3(RM, VPMASKMOVQ, vpmaskmovq, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
1891 if (pVCpu->iem.s.uVexLength)
1892 {
1893 /*
1894 * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
1895 */
1896 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1897 IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
1898 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
1899 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
1900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1901 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
1902 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1903
1904 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1905 IEM_MC_PREPARE_AVX_USAGE();
1906
1907 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);
1908
1909 IEM_MC_END();
1910 }
1911 else
1912 {
1913 /*
1914 * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
1915 */
1916 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1917 IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
1918 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
1919 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
1920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1921 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
1922 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1923
1924 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1925 IEM_MC_PREPARE_AVX_USAGE();
1926
1927 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);
1928
1929 IEM_MC_END();
1930 }
1931 }
1932 else
1933 {
1934 // IEMOP_MNEMONIC3(RM, VPMASKMOVD, vpmaskmovd, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
1935 if (pVCpu->iem.s.uVexLength)
1936 {
1937 /*
1938 * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
1939 */
1940 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1941 IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
1942 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
1943 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
1944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1945 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
1946 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1947
1948 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1949 IEM_MC_PREPARE_AVX_USAGE();
1950
1951 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);
1952
1953 IEM_MC_END();
1954 }
1955 else
1956 {
1957 /*
1958 * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
1959 */
1960 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1961 IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
1962 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
1963 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
1964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1965 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
1966 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1967
1968 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1969 IEM_MC_PREPARE_AVX_USAGE();
1970
1971 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);
1972
1973 IEM_MC_END();
1974 }
1975 }
1976 }
1977 else
1978 {
1979 /* The register, register encoding is invalid. */
1980 IEMOP_RAISE_INVALID_OPCODE_RET();
1981 }
1982}
1983
1984
1985/* Opcode VEX.66.0F38 0x8d - invalid. */
1986
1987
1988
1989/** Opcode VEX.66.0F38 0x8e. */
1990FNIEMOP_DEF(iemOp_vpmaskmovd_q_Mx_Vx_Hx)
1991{
1992 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
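    /* VPMASKMOVD/Q store form: only elements whose mask element (in VEX.vvvv) has its MSB
       set are written; memory for masked-off elements is left untouched and must not fault. */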
1993 if (!IEM_IS_MODRM_REG_MODE(bRm))
1994 {
1995 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1996 {
1997 // IEMOP_MNEMONIC3(RM, VPMASKMOVQ, vpmaskmovq, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
1998 if (pVCpu->iem.s.uVexLength)
1999 {
2000 /*
2001 * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
2002 */
2003 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2004
2005 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
2006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2007 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
2008 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2009 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
2010 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
2011
2012 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2013 IEM_MC_PREPARE_AVX_USAGE();
2014
2015 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);
2016
2017 IEM_MC_END();
2018 }
2019 else
2020 {
2021 /*
2022 * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
2023 */
2024 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2025
2026 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
2027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2028 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
2029 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2030 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
2031 IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
2032
2033 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2034 IEM_MC_PREPARE_AVX_USAGE();
2035
2036 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);
2037
2038 IEM_MC_END();
2039 }
2040 }
2041 else
2042 {
2043 // IEMOP_MNEMONIC3(RM, VPMASKMOVD, vpmaskmovd, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
2044 if (pVCpu->iem.s.uVexLength)
2045 {
2046 /*
2047 * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
2048 */
2049 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2050
2051 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
2052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2053 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
2054 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2055 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
2056 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
2057
2058 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2059 IEM_MC_PREPARE_AVX_USAGE();
2060
2061 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);
2062
2063 IEM_MC_END();
2064 }
2065 else
2066 {
2067 /*
2068 * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
2069 */
2070 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2071
2072 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
2073 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2074 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
2075 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2076 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
2077 IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
2078
2079 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2080 IEM_MC_PREPARE_AVX_USAGE();
2081
2082 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);
2083
2084 IEM_MC_END();
2085 }
2086 }
2087 }
2088 else
2089 {
2090 /* The register, register encoding is invalid. */
2091 IEMOP_RAISE_INVALID_OPCODE_RET();
2092 }
2093}
2094
2095
2096/* Opcode VEX.66.0F38 0x8f - invalid. */
2097
2098/** Opcode VEX.66.0F38 0x90 (vex only). */
2099FNIEMOP_STUB(iemOp_vpgatherdd_q_Vx_Hx_Wx);
2100/** Opcode VEX.66.0F38 0x91 (vex only). */
2101FNIEMOP_STUB(iemOp_vpgatherqd_q_Vx_Hx_Wx);
2102/** Opcode VEX.66.0F38 0x92 (vex only). */
2103FNIEMOP_STUB(iemOp_vgatherdps_d_Vx_Hx_Wx);
2104/** Opcode VEX.66.0F38 0x93 (vex only). */
2105FNIEMOP_STUB(iemOp_vgatherqps_d_Vx_Hx_Wx);
2106/* Opcode VEX.66.0F38 0x94 - invalid. */
2107/* Opcode VEX.66.0F38 0x95 - invalid. */
2108/** Opcode VEX.66.0F38 0x96 (vex only). */
2109FNIEMOP_STUB(iemOp_vfmaddsub132ps_d_Vx_Hx_Wx);
2110/** Opcode VEX.66.0F38 0x97 (vex only). */
2111FNIEMOP_STUB(iemOp_vfmsubadd132ps_d_Vx_Hx_Wx);
2112/** Opcode VEX.66.0F38 0x98 (vex only). */
2113FNIEMOP_STUB(iemOp_vfmadd132ps_d_Vx_Hx_Wx);
2114/** Opcode VEX.66.0F38 0x99 (vex only). */
2115FNIEMOP_STUB(iemOp_vfmadd132ss_d_Vx_Hx_Wx);
2116/** Opcode VEX.66.0F38 0x9a (vex only). */
2117FNIEMOP_STUB(iemOp_vfmsub132ps_d_Vx_Hx_Wx);
2118/** Opcode VEX.66.0F38 0x9b (vex only). */
2119FNIEMOP_STUB(iemOp_vfmsub132ss_d_Vx_Hx_Wx);
2120/** Opcode VEX.66.0F38 0x9c (vex only). */
2121FNIEMOP_STUB(iemOp_vfnmadd132ps_d_Vx_Hx_Wx);
2122/** Opcode VEX.66.0F38 0x9d (vex only). */
2123FNIEMOP_STUB(iemOp_vfnmadd132ss_d_Vx_Hx_Wx);
2124/** Opcode VEX.66.0F38 0x9e (vex only). */
2125FNIEMOP_STUB(iemOp_vfnmsub132ps_d_Vx_Hx_Wx);
2126/** Opcode VEX.66.0F38 0x9f (vex only). */
2127FNIEMOP_STUB(iemOp_vfnmsub132ss_d_Vx_Hx_Wx);
2128
2129/* Opcode VEX.66.0F38 0xa0 - invalid. */
2130/* Opcode VEX.66.0F38 0xa1 - invalid. */
2131/* Opcode VEX.66.0F38 0xa2 - invalid. */
2132/* Opcode VEX.66.0F38 0xa3 - invalid. */
2133/* Opcode VEX.66.0F38 0xa4 - invalid. */
2134/* Opcode VEX.66.0F38 0xa5 - invalid. */
2135/** Opcode VEX.66.0F38 0xa6 (vex only). */
2136FNIEMOP_STUB(iemOp_vfmaddsub213ps_d_Vx_Hx_Wx);
2137/** Opcode VEX.66.0F38 0xa7 (vex only). */
2138FNIEMOP_STUB(iemOp_vfmsubadd213ps_d_Vx_Hx_Wx);
2139/** Opcode VEX.66.0F38 0xa8 (vex only). */
2140FNIEMOP_STUB(iemOp_vfmadd213ps_d_Vx_Hx_Wx);
2141/** Opcode VEX.66.0F38 0xa9 (vex only). */
2142FNIEMOP_STUB(iemOp_vfmadd213ss_d_Vx_Hx_Wx);
2143/** Opcode VEX.66.0F38 0xaa (vex only). */
2144FNIEMOP_STUB(iemOp_vfmsub213ps_d_Vx_Hx_Wx);
2145/** Opcode VEX.66.0F38 0xab (vex only). */
2146FNIEMOP_STUB(iemOp_vfmsub213ss_d_Vx_Hx_Wx);
2147/** Opcode VEX.66.0F38 0xac (vex only). */
2148FNIEMOP_STUB(iemOp_vfnmadd213ps_d_Vx_Hx_Wx);
2149/** Opcode VEX.66.0F38 0xad (vex only). */
2150FNIEMOP_STUB(iemOp_vfnmadd213ss_d_Vx_Hx_Wx);
2151/** Opcode VEX.66.0F38 0xae (vex only). */
2152FNIEMOP_STUB(iemOp_vfnmsub213ps_d_Vx_Hx_Wx);
2153/** Opcode VEX.66.0F38 0xaf (vex only). */
2154FNIEMOP_STUB(iemOp_vfnmsub213ss_d_Vx_Hx_Wx);
2155
2156/* Opcode VEX.66.0F38 0xb0 - invalid. */
2157/* Opcode VEX.66.0F38 0xb1 - invalid. */
2158/* Opcode VEX.66.0F38 0xb2 - invalid. */
2159/* Opcode VEX.66.0F38 0xb3 - invalid. */
2160/* Opcode VEX.66.0F38 0xb4 - invalid. */
2161/* Opcode VEX.66.0F38 0xb5 - invalid. */
2162/** Opcode VEX.66.0F38 0xb6 (vex only). */
2163FNIEMOP_STUB(iemOp_vfmaddsub231ps_d_Vx_Hx_Wx);
2164/** Opcode VEX.66.0F38 0xb7 (vex only). */
2165FNIEMOP_STUB(iemOp_vfmsubadd231ps_d_Vx_Hx_Wx);
2166/** Opcode VEX.66.0F38 0xb8 (vex only). */
2167FNIEMOP_STUB(iemOp_vfmadd231ps_d_Vx_Hx_Wx);
2168/** Opcode VEX.66.0F38 0xb9 (vex only). */
2169FNIEMOP_STUB(iemOp_vfmadd231ss_d_Vx_Hx_Wx);
2170/** Opcode VEX.66.0F38 0xba (vex only). */
2171FNIEMOP_STUB(iemOp_vfmsub231ps_d_Vx_Hx_Wx);
2172/** Opcode VEX.66.0F38 0xbb (vex only). */
2173FNIEMOP_STUB(iemOp_vfmsub231ss_d_Vx_Hx_Wx);
2174/** Opcode VEX.66.0F38 0xbc (vex only). */
2175FNIEMOP_STUB(iemOp_vfnmadd231ps_d_Vx_Hx_Wx);
2176/** Opcode VEX.66.0F38 0xbd (vex only). */
2177FNIEMOP_STUB(iemOp_vfnmadd231ss_d_Vx_Hx_Wx);
2178/** Opcode VEX.66.0F38 0xbe (vex only). */
2179FNIEMOP_STUB(iemOp_vfnmsub231ps_d_Vx_Hx_Wx);
2180/** Opcode VEX.66.0F38 0xbf (vex only). */
2181FNIEMOP_STUB(iemOp_vfnmsub231ss_d_Vx_Hx_Wx);
2182
2183/* Opcode VEX.0F38 0xc0 - invalid. */
2184/* Opcode VEX.66.0F38 0xc0 - invalid. */
2185/* Opcode VEX.0F38 0xc1 - invalid. */
2186/* Opcode VEX.66.0F38 0xc1 - invalid. */
2187/* Opcode VEX.0F38 0xc2 - invalid. */
2188/* Opcode VEX.66.0F38 0xc2 - invalid. */
2189/* Opcode VEX.0F38 0xc3 - invalid. */
2190/* Opcode VEX.66.0F38 0xc3 - invalid. */
2191/* Opcode VEX.0F38 0xc4 - invalid. */
2192/* Opcode VEX.66.0F38 0xc4 - invalid. */
2193/* Opcode VEX.0F38 0xc5 - invalid. */
2194/* Opcode VEX.66.0F38 0xc5 - invalid. */
2195/* Opcode VEX.0F38 0xc6 - invalid. */
2196/* Opcode VEX.66.0F38 0xc6 - invalid. */
2197/* Opcode VEX.0F38 0xc7 - invalid. */
2198/* Opcode VEX.66.0F38 0xc7 - invalid. */
2199/* Opcode VEX.0F38 0xc8 - invalid. */
2200/* Opcode VEX.66.0F38 0xc8 - invalid. */
2201/* Opcode VEX.0F38 0xc9 - invalid. */
2202/* Opcode VEX.66.0F38 0xc9 - invalid. */
2203/* Opcode VEX.0F38 0xca. */
2204/* Opcode VEX.66.0F38 0xca - invalid. */
2205/* Opcode VEX.0F38 0xcb - invalid. */
2206/* Opcode VEX.66.0F38 0xcb - invalid. */
2207/* Opcode VEX.0F38 0xcc - invalid. */
2208/* Opcode VEX.66.0F38 0xcc - invalid. */
2209/* Opcode VEX.0F38 0xcd - invalid. */
2210/* Opcode VEX.66.0F38 0xcd - invalid. */
2211/* Opcode VEX.0F38 0xce - invalid. */
2212/* Opcode VEX.66.0F38 0xce - invalid. */
2213/* Opcode VEX.0F38 0xcf - invalid. */
2214/* Opcode VEX.66.0F38 0xcf - invalid. */
2215
2216/* Opcode VEX.66.0F38 0xd0 - invalid. */
2217/* Opcode VEX.66.0F38 0xd1 - invalid. */
2218/* Opcode VEX.66.0F38 0xd2 - invalid. */
2219/* Opcode VEX.66.0F38 0xd3 - invalid. */
2220/* Opcode VEX.66.0F38 0xd4 - invalid. */
2221/* Opcode VEX.66.0F38 0xd5 - invalid. */
2222/* Opcode VEX.66.0F38 0xd6 - invalid. */
2223/* Opcode VEX.66.0F38 0xd7 - invalid. */
2224/* Opcode VEX.66.0F38 0xd8 - invalid. */
2225/* Opcode VEX.66.0F38 0xd9 - invalid. */
2226/* Opcode VEX.66.0F38 0xda - invalid. */
2227/** Opcode VEX.66.0F38 0xdb. */
2228FNIEMOP_STUB(iemOp_vaesimc_Vdq_Wdq);
2229/** Opcode VEX.66.0F38 0xdc. */
2230FNIEMOP_STUB(iemOp_vaesenc_Vdq_Wdq);
2231/** Opcode VEX.66.0F38 0xdd. */
2232FNIEMOP_STUB(iemOp_vaesenclast_Vdq_Wdq);
2233/** Opcode VEX.66.0F38 0xde. */
2234FNIEMOP_STUB(iemOp_vaesdec_Vdq_Wdq);
2235/** Opcode VEX.66.0F38 0xdf. */
2236FNIEMOP_STUB(iemOp_vaesdeclast_Vdq_Wdq);
2237
2238/* Opcode VEX.66.0F38 0xe0 - invalid. */
2239/* Opcode VEX.66.0F38 0xe1 - invalid. */
2240/* Opcode VEX.66.0F38 0xe2 - invalid. */
2241/* Opcode VEX.66.0F38 0xe3 - invalid. */
2242/* Opcode VEX.66.0F38 0xe4 - invalid. */
2243/* Opcode VEX.66.0F38 0xe5 - invalid. */
2244/* Opcode VEX.66.0F38 0xe6 - invalid. */
2245/* Opcode VEX.66.0F38 0xe7 - invalid. */
2246/* Opcode VEX.66.0F38 0xe8 - invalid. */
2247/* Opcode VEX.66.0F38 0xe9 - invalid. */
2248/* Opcode VEX.66.0F38 0xea - invalid. */
2249/* Opcode VEX.66.0F38 0xeb - invalid. */
2250/* Opcode VEX.66.0F38 0xec - invalid. */
2251/* Opcode VEX.66.0F38 0xed - invalid. */
2252/* Opcode VEX.66.0F38 0xee - invalid. */
2253/* Opcode VEX.66.0F38 0xef - invalid. */
2254
2255
2256/* Opcode VEX.0F38 0xf0 - invalid (legacy only). */
2257/* Opcode VEX.66.0F38 0xf0 - invalid (legacy only). */
2258/* Opcode VEX.F3.0F38 0xf0 - invalid. */
2259/* Opcode VEX.F2.0F38 0xf0 - invalid (legacy only). */
2260
2261/* Opcode VEX.0F38 0xf1 - invalid (legacy only). */
2262/* Opcode VEX.66.0F38 0xf1 - invalid (legacy only). */
2263/* Opcode VEX.F3.0F38 0xf1 - invalid. */
2264/* Opcode VEX.F2.0F38 0xf1 - invalid (legacy only). */
2265
2266/**
2267 * @opcode 0xf2
2268 * @oppfx none
2269 * @opflmodify cf,pf,af,zf,sf,of
2270 * @opflclear cf,of
2271 * @opflundef pf,af
2272 * @note VEX only
2273 */
2274FNIEMOP_DEF(iemOp_andn_Gy_By_Ey)
2275{
2276 IEMOP_MNEMONIC3(VEX_RVM, ANDN, andn, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
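    /* ANDN: dst = ~src1 & src2, i.e. ~vvvv & r/m (e.g. andn rax, rbx, rcx yields
       rax = ~rbx & rcx); SF and ZF reflect the result, CF and OF are cleared. */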
2277 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2278 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF);
2279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2280 if (IEM_IS_MODRM_REG_MODE(bRm))
2281 {
2282 /*
2283 * Register, register.
2284 */
2285 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2286 {
2287 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2288 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
2289 IEM_MC_ARG(uint64_t *, pDst, 0);
2290 IEM_MC_ARG(uint64_t, uSrc1, 1);
2291 IEM_MC_ARG(uint64_t, uSrc2, 2);
2292 IEM_MC_ARG(uint32_t *, pEFlags, 3);
2293 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2294 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2295 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2296 IEM_MC_REF_EFLAGS(pEFlags);
2297 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u64, iemAImpl_andn_u64_fallback),
2298 pDst, uSrc1, uSrc2, pEFlags);
2299 IEM_MC_ADVANCE_RIP_AND_FINISH();
2300 IEM_MC_END();
2301 }
2302 else
2303 {
2304 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2305 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
2306 IEM_MC_ARG(uint32_t *, pDst, 0);
2307 IEM_MC_ARG(uint32_t, uSrc1, 1);
2308 IEM_MC_ARG(uint32_t, uSrc2, 2);
2309 IEM_MC_ARG(uint32_t *, pEFlags, 3);
2310 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2311 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2312 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2313 IEM_MC_REF_EFLAGS(pEFlags);
2314 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u32, iemAImpl_andn_u32_fallback),
2315 pDst, uSrc1, uSrc2, pEFlags);
2316 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2317 IEM_MC_ADVANCE_RIP_AND_FINISH();
2318 IEM_MC_END();
2319 }
2320 }
2321 else
2322 {
2323 /*
2324 * Register, memory.
2325 */
2326 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2327 {
2328 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2329 IEM_MC_ARG(uint64_t *, pDst, 0);
2330 IEM_MC_ARG(uint64_t, uSrc1, 1);
2331 IEM_MC_ARG(uint64_t, uSrc2, 2);
2332 IEM_MC_ARG(uint32_t *, pEFlags, 3);
2333 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2335 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
2336 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2337 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2338 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2339 IEM_MC_REF_EFLAGS(pEFlags);
2340 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u64, iemAImpl_andn_u64_fallback),
2341 pDst, uSrc1, uSrc2, pEFlags);
2342 IEM_MC_ADVANCE_RIP_AND_FINISH();
2343 IEM_MC_END();
2344 }
2345 else
2346 {
2347 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2348 IEM_MC_ARG(uint32_t *, pDst, 0);
2349 IEM_MC_ARG(uint32_t, uSrc1, 1);
2350 IEM_MC_ARG(uint32_t, uSrc2, 2);
2351 IEM_MC_ARG(uint32_t *, pEFlags, 3);
2352 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2354 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
2355 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2356 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2357 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2358 IEM_MC_REF_EFLAGS(pEFlags);
2359 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u32, iemAImpl_andn_u32_fallback),
2360 pDst, uSrc1, uSrc2, pEFlags);
2361 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2362 IEM_MC_ADVANCE_RIP_AND_FINISH();
2363 IEM_MC_END();
2364 }
2365 }
2366}
2367
2368/* Opcode VEX.66.0F38 0xf2 - invalid. */
2369/* Opcode VEX.F3.0F38 0xf2 - invalid. */
2370/* Opcode VEX.F2.0F38 0xf2 - invalid. */
2371
2372
2373/* Opcode VEX.0F38 0xf3 - invalid. */
2374/* Opcode VEX.66.0F38 0xf3 - invalid. */
2375
2376/* Opcode VEX.F3.0F38 0xf3 /0 - invalid. */
2377
2378/** Body for the VEX group 17 instructions (BLSR, BLSMSK, BLSI): By,Ey forms writing the register selected by VEX.vvvv. */
2379#define IEMOP_BODY_By_Ey(a_Instr) \
2380 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2381 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF); \
2382 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2383 { \
2384 /* \
2385 * Register, register. \
2386 */ \
2387 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2388 { \
2389 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2390 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
2391 IEM_MC_ARG(uint64_t, uSrc, 2); \
2392 IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2393 IEM_MC_ARG(uint64_t *, pDst, 1); \
2394 IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2395 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2396 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
2397 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
2398 iemAImpl_ ## a_Instr ## _u64_fallback), fEFlagsIn, pDst, uSrc); \
2399 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2400 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2401 IEM_MC_END(); \
2402 } \
2403 else \
2404 { \
2405 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2406 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
2407 IEM_MC_ARG(uint32_t, uSrc, 2); \
2408 IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2409 IEM_MC_ARG(uint32_t *, pDst, 1); \
2410 IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2411 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2412 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
2413 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
2414 iemAImpl_ ## a_Instr ## _u32_fallback), fEFlagsIn, pDst, uSrc); \
2415 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2416 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2417 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2418 IEM_MC_END(); \
2419 } \
2420 } \
2421 else \
2422 { \
2423 /* \
2424 * Register, memory. \
2425 */ \
2426 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2427 { \
2428 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2431 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
2432 \
2433 IEM_MC_ARG(uint64_t, uSrc, 2); \
2434 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2435 IEM_MC_ARG(uint64_t *, pDst, 1); \
2436 IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2437 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2438 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
2439 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
2440 iemAImpl_ ## a_Instr ## _u64_fallback), fEFlagsIn, pDst, uSrc); \
2441 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2442 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2443 IEM_MC_END(); \
2444 } \
2445 else \
2446 { \
2447 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2450 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
2451 \
2452 IEM_MC_ARG(uint32_t, uSrc, 2); \
2453 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2454 IEM_MC_ARG(uint32_t *, pDst, 1); \
2455 IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2456 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2457 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
2458 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
2459 iemAImpl_ ## a_Instr ## _u32_fallback), fEFlagsIn, pDst, uSrc); \
2460 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2461 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2462 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2463 IEM_MC_END(); \
2464 } \
2465 } \
2466 (void)0
2467
2468
2469/**
2470 * @opmaps vexgrp17
2471 * @opcode /1
2472 * @opflmodify cf,pf,af,zf,sf,of
2473 * @opflclear of
2474 * @opflundef pf,af
2475 */
2476FNIEMOP_DEF_1(iemOp_VGrp17_blsr_By_Ey, uint8_t, bRm)
2477{
2478 IEMOP_MNEMONIC2(VEX_VM, BLSR, blsr, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
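    /* BLSR: dst = src & (src - 1), clearing the lowest set bit of the source. */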
2479 IEMOP_BODY_By_Ey(blsr);
2480}
2481
2482
2483/**
2484 * @opmaps vexgrp17
2485 * @opcode /2
2486 * @opflmodify cf,pf,af,zf,sf,of
2487 * @opflclear zf,of
2488 * @opflundef pf,af
2489 */
2490FNIEMOP_DEF_1(iemOp_VGrp17_blsmsk_By_Ey, uint8_t, bRm)
2491{
2492 IEMOP_MNEMONIC2(VEX_VM, BLSMSK, blsmsk, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
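    /* BLSMSK: dst = src ^ (src - 1), a mask of ones up to and including the lowest set bit. */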
2493 IEMOP_BODY_By_Ey(blsmsk);
2494}
2495
2496
2497/**
2498 * @opmaps vexgrp17
2499 * @opcode /3
2500 * @opflmodify cf,pf,af,zf,sf,of
2501 * @opflclear of
2502 * @opflundef pf,af
2503 */
2504FNIEMOP_DEF_1(iemOp_VGrp17_blsi_By_Ey, uint8_t, bRm)
2505{
2506 IEMOP_MNEMONIC2(VEX_VM, BLSI, blsi, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
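    /* BLSI: dst = src & -src, isolating the lowest set bit of the source. */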
2507 IEMOP_BODY_By_Ey(blsi);
2508}
2509
2510
2511/* Opcode VEX.F3.0F38 0xf3 /4 - invalid. */
2512/* Opcode VEX.F3.0F38 0xf3 /5 - invalid. */
2513/* Opcode VEX.F3.0F38 0xf3 /6 - invalid. */
2514/* Opcode VEX.F3.0F38 0xf3 /7 - invalid. */
2515
2516/**
2517 * Group 17 jump table for the VEX.F3 variant.
2518 */
2519IEM_STATIC const PFNIEMOPRM g_apfnVexGroup17_f3[] =
2520{
2521 /* /0 */ iemOp_InvalidWithRM,
2522 /* /1 */ iemOp_VGrp17_blsr_By_Ey,
2523 /* /2 */ iemOp_VGrp17_blsmsk_By_Ey,
2524 /* /3 */ iemOp_VGrp17_blsi_By_Ey,
2525 /* /4 */ iemOp_InvalidWithRM,
2526 /* /5 */ iemOp_InvalidWithRM,
2527 /* /6 */ iemOp_InvalidWithRM,
2528 /* /7 */ iemOp_InvalidWithRM
2529};
2530AssertCompile(RT_ELEMENTS(g_apfnVexGroup17_f3) == 8);
2531
2532/** Opcode VEX.F3.0F38 0xf3 (vex only - group 17). */
2533FNIEMOP_DEF(iemOp_VGrp17_f3)
2534{
2535 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2536 return FNIEMOP_CALL_1(g_apfnVexGroup17_f3[IEM_GET_MODRM_REG_8(bRm)], bRm);
2537}
2538
2539/* Opcode VEX.F2.0F38 0xf3 - invalid (vex only - group 17). */
2540
2541
2542/* Opcode VEX.0F38 0xf4 - invalid. */
2543/* Opcode VEX.66.0F38 0xf4 - invalid. */
2544/* Opcode VEX.F3.0F38 0xf4 - invalid. */
2545/* Opcode VEX.F2.0F38 0xf4 - invalid. */
2546
2547/** Body for BZHI, BEXTR, ++; assumes VEX.L must be 0. */
2548#define IEMOP_BODY_Gy_Ey_By(a_Instr, a_fFeatureMember, a_fUndefFlags) \
2549 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2550 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fUndefFlags); \
2551 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2552 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2553 { \
2554 /* \
2555 * Register, register. \
2556 */ \
2557 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2558 { \
2559 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2560 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2561 IEM_MC_ARG(uint64_t *, pDst, 0); \
2562 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2563 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2564 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2565 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2566 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2567 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2568 IEM_MC_REF_EFLAGS(pEFlags); \
2569 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2570 iemAImpl_ ## a_Instr ## _u64_fallback), \
2571 pDst, uSrc1, uSrc2, pEFlags); \
2572 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2573 IEM_MC_END(); \
2574 } \
2575 else \
2576 { \
2577 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2578 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2579 IEM_MC_ARG(uint32_t *, pDst, 0); \
2580 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2581 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2582 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2583 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2584 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2585 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2586 IEM_MC_REF_EFLAGS(pEFlags); \
2587 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2588 iemAImpl_ ## a_Instr ## _u32_fallback), \
2589 pDst, uSrc1, uSrc2, pEFlags); \
2590 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2591 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2592 IEM_MC_END(); \
2593 } \
2594 } \
2595 else \
2596 { \
2597 /* \
2598 * Register, memory. \
2599 */ \
2600 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2601 { \
2602 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2603 IEM_MC_ARG(uint64_t *, pDst, 0); \
2604 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2605 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2606 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2609 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2610 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2611 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2612 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2613 IEM_MC_REF_EFLAGS(pEFlags); \
2614 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2615 iemAImpl_ ## a_Instr ## _u64_fallback), \
2616 pDst, uSrc1, uSrc2, pEFlags); \
2617 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2618 IEM_MC_END(); \
2619 } \
2620 else \
2621 { \
2622 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2623 IEM_MC_ARG(uint32_t *, pDst, 0); \
2624 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2625 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2626 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2629 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2630 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2631 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2632 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2633 IEM_MC_REF_EFLAGS(pEFlags); \
2634 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2635 iemAImpl_ ## a_Instr ## _u32_fallback), \
2636 pDst, uSrc1, uSrc2, pEFlags); \
2637 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2638 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2639 IEM_MC_END(); \
2640 } \
2641 } \
2642 (void)0
2643
2644/** Body for SARX, SHLX, SHRX; assumes VEX.L must be 0. */
2645#define IEMOP_BODY_Gy_Ey_By_NoEflags(a_Instr, a_fFeatureMember) \
2646 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2647 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2648 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2649 { \
2650 /* \
2651 * Register, register. \
2652 */ \
2653 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2654 { \
2655 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2656 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2657 IEM_MC_ARG(uint64_t *, pDst, 0); \
2658 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2659 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2660 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2661 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2662 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2663 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2664 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
2665 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2666 IEM_MC_END(); \
2667 } \
2668 else \
2669 { \
2670 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2671 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2672 IEM_MC_ARG(uint32_t *, pDst, 0); \
2673 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2674 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2675 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2676 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2677 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2678 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2679 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
2680 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2681 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2682 IEM_MC_END(); \
2683 } \
2684 } \
2685 else \
2686 { \
2687 /* \
2688 * Register, memory. \
2689 */ \
2690 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2691 { \
2692 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2693 IEM_MC_ARG(uint64_t *, pDst, 0); \
2694 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2695 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2698 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2699 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2700 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2701 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2702 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2703 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
2704 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2705 IEM_MC_END(); \
2706 } \
2707 else \
2708 { \
2709 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2710 IEM_MC_ARG(uint32_t *, pDst, 0); \
2711 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2712 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2715 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2716 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2717 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2718 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2719 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2720 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
2721 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2722 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2723 IEM_MC_END(); \
2724 } \
2725 } \
2726 (void)0
2727
2728/**
2729 * @opcode 0xf5
2730 * @oppfx none
2731 * @opflmodify cf,pf,af,zf,sf,of
2732 * @opflclear of
2733 * @opflundef pf,af
2734 * @note VEX only
2735 */
2736FNIEMOP_DEF(iemOp_bzhi_Gy_Ey_By)
2737{
2738 IEMOP_MNEMONIC3(VEX_RMV, BZHI, bzhi, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
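    /* BZHI: dst = r/m with all bits at positions >= the index in bits 7:0 of the vvvv
       register zeroed; CF is set when the index exceeds the operand width. */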
2739 IEMOP_BODY_Gy_Ey_By(bzhi, fBmi2, X86_EFL_AF | X86_EFL_PF);
2740}
2741
2742/* Opcode VEX.66.0F38 0xf5 - invalid. */
2743
2744/** Body for PDEP and PEXT (similar to ANDN, except no EFLAGS). */
2745#define IEMOP_BODY_Gy_By_Ey_NoEflags(a_Instr, a_fFeatureMember) \
2746 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2747 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2748 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2749 { \
2750 /* \
2751 * Register, register. \
2752 */ \
2753 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2754 { \
2755 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2756 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2757 IEM_MC_ARG(uint64_t *, pDst, 0); \
2758 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2759 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2760 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2761 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2762 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2763 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
2764 iemAImpl_ ## a_Instr ## _u64, \
2765 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
2766 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2767 IEM_MC_END(); \
2768 } \
2769 else \
2770 { \
2771 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2772 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2773 IEM_MC_ARG(uint32_t *, pDst, 0); \
2774 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2775 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2776 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2777 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2778 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2779 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
2780 iemAImpl_ ## a_Instr ## _u32, \
2781 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
2782 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2783 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2784 IEM_MC_END(); \
2785 } \
2786 } \
2787 else \
2788 { \
2789 /* \
2790 * Register, memory. \
2791 */ \
2792 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2793 { \
2794 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2795 IEM_MC_ARG(uint64_t *, pDst, 0); \
2796 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2797 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2798 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2799 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2800 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2801 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2802 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2803 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2804 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
2805 iemAImpl_ ## a_Instr ## _u64, \
2806 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
2807 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2808 IEM_MC_END(); \
2809 } \
2810 else \
2811 { \
2812 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2813 IEM_MC_ARG(uint32_t *, pDst, 0); \
2814 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2815 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2818 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2819 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2820 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2821 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2822 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
2823 iemAImpl_ ## a_Instr ## _u32, \
2824 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
2825 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2826 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2827 IEM_MC_END(); \
2828 } \
2829 } \
2830 (void)0
2831
2832
2833/** Opcode VEX.F3.0F38 0xf5 (vex only). */
2834FNIEMOP_DEF(iemOp_pext_Gy_By_Ey)
2835{
2836 IEMOP_MNEMONIC3(VEX_RVM, PEXT, pext, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
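    /* PEXT: gathers the bits of the vvvv register selected by the r/m mask and packs them
       into the contiguous low bits of dst; EFLAGS are not modified. */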
2837 IEMOP_BODY_Gy_By_Ey_NoEflags(pext, fBmi2);
2838}
2839
2840
2841/** Opcode VEX.F2.0F38 0xf5 (vex only). */
2842FNIEMOP_DEF(iemOp_pdep_Gy_By_Ey)
2843{
2844 IEMOP_MNEMONIC3(VEX_RVM, PDEP, pdep, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
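    /* PDEP: scatters the contiguous low bits of the vvvv register to the bit positions set
       in the r/m mask, zeroing the remaining bits of dst; EFLAGS are not modified. */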
2845 IEMOP_BODY_Gy_By_Ey_NoEflags(pdep, fBmi2);
2846}
2847
2848
2849/* Opcode VEX.0F38 0xf6 - invalid. */
2850/* Opcode VEX.66.0F38 0xf6 - invalid (legacy only). */
2851/* Opcode VEX.F3.0F38 0xf6 - invalid (legacy only). */
2852
2853
2854/**
2855 * @opcode 0xf6
2856 * @oppfx 0xf2
2857 * @opflclass unchanged
2858 */
2859FNIEMOP_DEF(iemOp_mulx_By_Gy_rDX_Ey)
2860{
2861 IEMOP_MNEMONIC4(VEX_RVM, MULX, mulx, Gy, By, Ey, rDX, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
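    /* MULX: unsigned rDX * r/m with the high half written to the ModRM.reg destination and
       the low half to the vvvv destination, leaving EFLAGS untouched; if both name the same
       register it receives the high half. */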
2862 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2864 if (IEM_IS_MODRM_REG_MODE(bRm))
2865 {
2866 /*
2867 * Register, register.
2868 */
2869 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2870 {
2871 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2872 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
2873 IEM_MC_ARG(uint64_t *, pDst1, 0);
2874 IEM_MC_ARG(uint64_t *, pDst2, 1);
2875 IEM_MC_ARG(uint64_t, uSrc1, 2);
2876 IEM_MC_ARG(uint64_t, uSrc2, 3);
2877 IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
2878 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2879 IEM_MC_REF_GREG_U64(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
2880 IEM_MC_REF_GREG_U64(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2881 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
2882 pDst1, pDst2, uSrc1, uSrc2);
2883 IEM_MC_ADVANCE_RIP_AND_FINISH();
2884 IEM_MC_END();
2885 }
2886 else
2887 {
2888 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2889 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
2890 IEM_MC_ARG(uint32_t *, pDst1, 0);
2891 IEM_MC_ARG(uint32_t *, pDst2, 1);
2892 IEM_MC_ARG(uint32_t, uSrc1, 2);
2893 IEM_MC_ARG(uint32_t, uSrc2, 3);
2894 IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
2895 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2896 IEM_MC_REF_GREG_U32(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
2897 IEM_MC_REF_GREG_U32(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2898 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
2899 pDst1, pDst2, uSrc1, uSrc2);
2900 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu));
2901 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2902 IEM_MC_ADVANCE_RIP_AND_FINISH();
2903 IEM_MC_END();
2904 }
2905 }
2906 else
2907 {
2908 /*
2909 * Register, memory.
2910 */
2911 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2912 {
2913 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2914 IEM_MC_ARG(uint64_t *, pDst1, 0);
2915 IEM_MC_ARG(uint64_t *, pDst2, 1);
2916 IEM_MC_ARG(uint64_t, uSrc1, 2);
2917 IEM_MC_ARG(uint64_t, uSrc2, 3);
2918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2919 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2920 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
2921 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2922 IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
2923 IEM_MC_REF_GREG_U64(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2924 IEM_MC_REF_GREG_U64(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
2925 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
2926 pDst1, pDst2, uSrc1, uSrc2);
2927 IEM_MC_ADVANCE_RIP_AND_FINISH();
2928 IEM_MC_END();
2929 }
2930 else
2931 {
2932 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2933 IEM_MC_ARG(uint32_t *, pDst1, 0);
2934 IEM_MC_ARG(uint32_t *, pDst2, 1);
2935 IEM_MC_ARG(uint32_t, uSrc1, 2);
2936 IEM_MC_ARG(uint32_t, uSrc2, 3);
2937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2939 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
2940 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2941 IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
2942 IEM_MC_REF_GREG_U32(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2943 IEM_MC_REF_GREG_U32(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
2944 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
2945 pDst1, pDst2, uSrc1, uSrc2);
2946 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu));
2947 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2948 IEM_MC_ADVANCE_RIP_AND_FINISH();
2949 IEM_MC_END();
2950 }
2951 }
2952}
2953
2954
2955/**
2956 * @opcode 0xf7
2957 * @oppfx none
2958 * @opflmodify cf,pf,af,zf,sf,of
2959 * @opflclear cf,of
2960 * @opflundef pf,af,sf
2961 */
2962FNIEMOP_DEF(iemOp_bextr_Gy_Ey_By)
2963{
2964 IEMOP_MNEMONIC3(VEX_RMV, BEXTR, bextr, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
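    /* BEXTR: extracts a bit field from r/m, the start position taken from bits 7:0 and the
       length from bits 15:8 of the vvvv register; the result is zero-extended, ZF updated,
       CF and OF cleared. */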
2965 IEMOP_BODY_Gy_Ey_By(bextr, fBmi1, X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
2966}
2967
2968
2969/**
2970 * @opcode 0xf7
2971 * @oppfx 0x66
2972 * @opflclass unchanged
2973 */
2974FNIEMOP_DEF(iemOp_shlx_Gy_Ey_By)
2975{
2976 IEMOP_MNEMONIC3(VEX_RMV, SHLX, shlx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
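    /* SHLX (and SARX/SHRX below): shift r/m by the count in the vvvv register without
       touching EFLAGS; the count is masked to the operand width (5 or 6 bits). */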
2977 IEMOP_BODY_Gy_Ey_By_NoEflags(shlx, fBmi2);
2978}
2979
2980
2981/**
2982 * @opcode 0xf7
2983 * @oppfx 0xf3
2984 * @opflclass unchanged
2985 */
2986FNIEMOP_DEF(iemOp_sarx_Gy_Ey_By)
2987{
2988 IEMOP_MNEMONIC3(VEX_RMV, SARX, sarx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2989 IEMOP_BODY_Gy_Ey_By_NoEflags(sarx, fBmi2);
2990}
2991
2992
2993/**
2994 * @opcode 0xf7
2995 * @oppfx 0xf2
2996 * @opflclass unchanged
2997 */
2998FNIEMOP_DEF(iemOp_shrx_Gy_Ey_By)
2999{
3000 IEMOP_MNEMONIC3(VEX_RMV, SHRX, shrx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3001 IEMOP_BODY_Gy_Ey_By_NoEflags(shrx, fBmi2);
3002}
3003
3004/* Opcode VEX.0F38 0xf8 - invalid. */
3005/* Opcode VEX.66.0F38 0xf8 - invalid. */
3006/* Opcode VEX.F3.0F38 0xf8 - invalid. */
3007/* Opcode VEX.F2.0F38 0xf8 - invalid. */
3008
3009/* Opcode VEX.0F38 0xf9 - invalid. */
3010/* Opcode VEX.66.0F38 0xf9 - invalid. */
3011/* Opcode VEX.F3.0F38 0xf9 - invalid. */
3012/* Opcode VEX.F2.0F38 0xf9 - invalid. */
3013
3014/* Opcode VEX.0F38 0xfa - invalid. */
3015/* Opcode VEX.66.0F38 0xfa - invalid. */
3016/* Opcode VEX.F3.0F38 0xfa - invalid. */
3017/* Opcode VEX.F2.0F38 0xfa - invalid. */
3018
3019/* Opcode VEX.0F38 0xfb - invalid. */
3020/* Opcode VEX.66.0F38 0xfb - invalid. */
3021/* Opcode VEX.F3.0F38 0xfb - invalid. */
3022/* Opcode VEX.F2.0F38 0xfb - invalid. */
3023
3024/* Opcode VEX.0F38 0xfc - invalid. */
3025/* Opcode VEX.66.0F38 0xfc - invalid. */
3026/* Opcode VEX.F3.0F38 0xfc - invalid. */
3027/* Opcode VEX.F2.0F38 0xfc - invalid. */
3028
3029/* Opcode VEX.0F38 0xfd - invalid. */
3030/* Opcode VEX.66.0F38 0xfd - invalid. */
3031/* Opcode VEX.F3.0F38 0xfd - invalid. */
3032/* Opcode VEX.F2.0F38 0xfd - invalid. */
3033
3034/* Opcode VEX.0F38 0xfe - invalid. */
3035/* Opcode VEX.66.0F38 0xfe - invalid. */
3036/* Opcode VEX.F3.0F38 0xfe - invalid. */
3037/* Opcode VEX.F2.0F38 0xfe - invalid. */
3038
3039/* Opcode VEX.0F38 0xff - invalid. */
3040/* Opcode VEX.66.0F38 0xff - invalid. */
3041/* Opcode VEX.F3.0F38 0xff - invalid. */
3042/* Opcode VEX.F2.0F38 0xff - invalid. */
3043
3044
3045/**
3046 * VEX opcode map \#2.
3047 *
3048 * @sa g_apfnThreeByte0f38
3049 */
3050const PFNIEMOP g_apfnVexMap2[] =
3051{
3052 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
3053 /* 0x00 */ iemOp_InvalidNeedRM, iemOp_vpshufb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3054 /* 0x01 */ iemOp_InvalidNeedRM, iemOp_vphaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3055 /* 0x02 */ iemOp_InvalidNeedRM, iemOp_vphaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3056 /* 0x03 */ iemOp_InvalidNeedRM, iemOp_vphaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3057 /* 0x04 */ iemOp_InvalidNeedRM, iemOp_vpmaddubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3058 /* 0x05 */ iemOp_InvalidNeedRM, iemOp_vphsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3059 /* 0x06 */ iemOp_InvalidNeedRM, iemOp_vphsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3060 /* 0x07 */ iemOp_InvalidNeedRM, iemOp_vphsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3061 /* 0x08 */ iemOp_InvalidNeedRM, iemOp_vpsignb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3062 /* 0x09 */ iemOp_InvalidNeedRM, iemOp_vpsignw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3063 /* 0x0a */ iemOp_InvalidNeedRM, iemOp_vpsignd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3064 /* 0x0b */ iemOp_InvalidNeedRM, iemOp_vpmulhrsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3065 /* 0x0c */ iemOp_InvalidNeedRM, iemOp_vpermilps_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3066 /* 0x0d */ iemOp_InvalidNeedRM, iemOp_vpermilpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3067 /* 0x0e */ iemOp_InvalidNeedRM, iemOp_vtestps_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3068 /* 0x0f */ iemOp_InvalidNeedRM, iemOp_vtestpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3069
3070 /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRM),
3071 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRM),
3072 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRM),
3073 /* 0x13 */ iemOp_InvalidNeedRM, iemOp_vcvtph2ps_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3074 /* 0x14 */ IEMOP_X4(iemOp_InvalidNeedRM),
3075 /* 0x15 */ IEMOP_X4(iemOp_InvalidNeedRM),
3076 /* 0x16 */ iemOp_InvalidNeedRM, iemOp_vpermps_Vqq_Hqq_Wqq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3077 /* 0x17 */ iemOp_InvalidNeedRM, iemOp_vptest_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3078 /* 0x18 */ iemOp_InvalidNeedRM, iemOp_vbroadcastss_Vx_Wd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3079 /* 0x19 */ iemOp_InvalidNeedRM, iemOp_vbroadcastsd_Vqq_Wq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3080 /* 0x1a */ iemOp_InvalidNeedRM, iemOp_vbroadcastf128_Vqq_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3081 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
3082 /* 0x1c */ iemOp_InvalidNeedRM, iemOp_vpabsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3083 /* 0x1d */ iemOp_InvalidNeedRM, iemOp_vpabsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3084 /* 0x1e */ iemOp_InvalidNeedRM, iemOp_vpabsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3085 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
3086
3087 /* 0x20 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3088 /* 0x21 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3089 /* 0x22 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3090 /* 0x23 */ iemOp_InvalidNeedRM, iemOp_vpmovsxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x24 */ iemOp_InvalidNeedRM, iemOp_vpmovsxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x25 */ iemOp_InvalidNeedRM, iemOp_vpmovsxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_InvalidNeedRM, iemOp_vpmuldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM, iemOp_vmovntdqa_Vx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2b */ iemOp_InvalidNeedRM, iemOp_vpackusdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_InvalidNeedRM, iemOp_vmaskmovps_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2d */ iemOp_InvalidNeedRM, iemOp_vmaskmovpd_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2e */ iemOp_InvalidNeedRM, iemOp_vmaskmovps_Mx_Hx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_InvalidNeedRM, iemOp_vmaskmovpd_Mx_Hx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x31 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x32 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x33 */ iemOp_InvalidNeedRM, iemOp_vpmovzxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x34 */ iemOp_InvalidNeedRM, iemOp_vpmovzxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x35 */ iemOp_InvalidNeedRM, iemOp_vpmovzxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x36 */ iemOp_InvalidNeedRM, iemOp_vpermd_Vqq_Hqq_Wqq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x37 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x38 */ iemOp_InvalidNeedRM, iemOp_vpminsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x39 */ iemOp_InvalidNeedRM, iemOp_vpminsd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3a */ iemOp_InvalidNeedRM, iemOp_vpminuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3b */ iemOp_InvalidNeedRM, iemOp_vpminud_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3c */ iemOp_InvalidNeedRM, iemOp_vpmaxsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3d */ iemOp_InvalidNeedRM, iemOp_vpmaxsd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3e */ iemOp_InvalidNeedRM, iemOp_vpmaxuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3f */ iemOp_InvalidNeedRM, iemOp_vpmaxud_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x40 */ iemOp_InvalidNeedRM, iemOp_vpmulld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x41 */ iemOp_InvalidNeedRM, iemOp_vphminposuw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ iemOp_InvalidNeedRM, iemOp_vpsrlvd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x46 */ iemOp_InvalidNeedRM, iemOp_vpsravd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x47 */ iemOp_InvalidNeedRM, iemOp_vpsllvd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x58 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x59 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x5a */ iemOp_InvalidNeedRM, iemOp_vbroadcasti128_Vqq_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x60 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x61 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x62 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x63 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x68 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x69 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x78 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ iemOp_InvalidNeedRM, iemOp_vpmaskmovd_q_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ iemOp_InvalidNeedRM, iemOp_vpmaskmovd_q_Mx_Vx_Hx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ iemOp_InvalidNeedRM, iemOp_vpgatherdd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x91 */ iemOp_InvalidNeedRM, iemOp_vpgatherqd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x92 */ iemOp_InvalidNeedRM, iemOp_vgatherdps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x93 */ iemOp_InvalidNeedRM, iemOp_vgatherqps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x97 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x98 */ iemOp_InvalidNeedRM, iemOp_vfmadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x99 */ iemOp_InvalidNeedRM, iemOp_vfmadd132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9a */ iemOp_InvalidNeedRM, iemOp_vfmsub132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9b */ iemOp_InvalidNeedRM, iemOp_vfmsub132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9c */ iemOp_InvalidNeedRM, iemOp_vfnmadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9d */ iemOp_InvalidNeedRM, iemOp_vfnmadd132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9e */ iemOp_InvalidNeedRM, iemOp_vfnmsub132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9f */ iemOp_InvalidNeedRM, iemOp_vfnmsub132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xa7 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xa8 */ iemOp_InvalidNeedRM, iemOp_vfmadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xa9 */ iemOp_InvalidNeedRM, iemOp_vfmadd213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xaa */ iemOp_InvalidNeedRM, iemOp_vfmsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xab */ iemOp_InvalidNeedRM, iemOp_vfmsub213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xac */ iemOp_InvalidNeedRM, iemOp_vfnmadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xad */ iemOp_InvalidNeedRM, iemOp_vfnmadd213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xae */ iemOp_InvalidNeedRM, iemOp_vfnmsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xaf */ iemOp_InvalidNeedRM, iemOp_vfnmsub213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xb7 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xb8 */ iemOp_InvalidNeedRM, iemOp_vfmadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xb9 */ iemOp_InvalidNeedRM, iemOp_vfmadd231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xba */ iemOp_InvalidNeedRM, iemOp_vfmsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbb */ iemOp_InvalidNeedRM, iemOp_vfmsub231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbc */ iemOp_InvalidNeedRM, iemOp_vfnmadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbd */ iemOp_InvalidNeedRM, iemOp_vfnmadd231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbe */ iemOp_InvalidNeedRM, iemOp_vfnmsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbf */ iemOp_InvalidNeedRM, iemOp_vfnmsub231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vaesimc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vaesenc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vaesenclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vaesdec_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vaesdeclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xf0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf2 */ iemOp_andn_Gy_By_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_VGrp17_f3, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf5 */ iemOp_bzhi_Gy_Ey_By, iemOp_InvalidNeedRM, iemOp_pext_Gy_By_Ey, iemOp_pdep_Gy_By_Ey,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_mulx_By_Gy_rDX_Ey,
    /* 0xf7 */ iemOp_bextr_Gy_Ey_By, iemOp_shlx_Gy_Ey_By, iemOp_sarx_Gy_Ey_By, iemOp_shrx_Gy_Ey_By,
    /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRM),
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap2) == 1024);
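
/*
 * A minimal sketch of how a map with this layout would typically be consumed
 * by the decoder: each opcode byte owns four consecutive slots, one per SIMD
 * prefix column (none, 0x66, 0xF3, 0xF2), which is also why IEMOP_X4 fills
 * all four columns of a fully invalid opcode at once.  The helper name and
 * the idxSimdPrefix parameter below are illustrative assumptions, not
 * existing IEM interfaces.
 */
DECLINLINE(PFNIEMOP) iemVexMap2LookupSketch(uint8_t bOpcode, uint8_t idxSimdPrefix)
{
    Assert(idxSimdPrefix < 4); /* 0=none, 1=0x66, 2=0xF3, 3=0xF2 */
    return g_apfnVexMap2[(uintptr_t)bOpcode * 4 + idxSimdPrefix];
}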

/** @} */
