VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap2.cpp.h@100732

Last change on this file since 100732 was 100714, checked in by vboxsync, 21 months ago

VMM/IEM: Require an IEMOP_HLP_DONE_DECODING in all MC blocks so we know exactly when the recompiler starts emitting code (calls) and we can make sure it's still safe to restart instruction decoding. Also made the Python script check this and that nothing that smells like decoding happens after IEMOP_HLP_DONE_DECODING and its friends. bugref:10369
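
A minimal sketch of the ordering this rule enforces, pieced together from the register-form decoders later in this file (illustrative only; the IEM_MC_* statements are the IEM micro-code DSL used throughout the listing, not plain C):

    /* Decoding first: consume all opcode/ModR/M bytes ... */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEM_MC_BEGIN(0, 1);
    /* ... then declare decoding done, so the recompiler may start emitting ... */
    IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
    /* ... and only emulation micro-ops from here on. */
    IEM_MC_LOCAL(uint32_t, uSrc);
    IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
    IEM_MC_PREPARE_AVX_USAGE();
    IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
    IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();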

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 106.9 KB
/* $Id: IEMAllInstructionsVexMap2.cpp.h 100714 2023-07-27 10:12:09Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsThree0f38.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 2
 * @{
 */

/* Opcode VEX.0F38 0x00 - invalid. */


/** Opcode VEX.66.0F38 0x00. */
FNIEMOP_DEF(iemOp_vpshufb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSHUFB, vpshufb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpshufb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x01 - invalid. */


/** Opcode VEX.66.0F38 0x01. */
FNIEMOP_DEF(iemOp_vphaddw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDW, vphaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x02 - invalid. */


/** Opcode VEX.66.0F38 0x02. */
FNIEMOP_DEF(iemOp_vphaddd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDD, vphaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x03 - invalid. */


/** Opcode VEX.66.0F38 0x03. */
FNIEMOP_DEF(iemOp_vphaddsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDSW, vphaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x04 - invalid. */


/** Opcode VEX.66.0F38 0x04. */
FNIEMOP_DEF(iemOp_vpmaddubsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMADDUBSW, vpmaddubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmaddubsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x05 - invalid. */


/** Opcode VEX.66.0F38 0x05. */
FNIEMOP_DEF(iemOp_vphsubw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBW, vphsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x06 - invalid. */


/** Opcode VEX.66.0F38 0x06. */
FNIEMOP_DEF(iemOp_vphsubd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBD, vphsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x07 - invalid. */


/** Opcode VEX.66.0F38 0x07. */
FNIEMOP_DEF(iemOp_vphsubsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBSW, vphsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x08 - invalid. */


/** Opcode VEX.66.0F38 0x08. */
FNIEMOP_DEF(iemOp_vpsignb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGNB, vpsignb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x09 - invalid. */


/** Opcode VEX.66.0F38 0x09. */
FNIEMOP_DEF(iemOp_vpsignw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGNW, vpsignw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0a - invalid. */


/** Opcode VEX.66.0F38 0x0a. */
FNIEMOP_DEF(iemOp_vpsignd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGND, vpsignd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0b - invalid. */


/** Opcode VEX.66.0F38 0x0b. */
FNIEMOP_DEF(iemOp_vpmulhrsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULHRSW, vpmulhrsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmulhrsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0c - invalid. */
/** Opcode VEX.66.0F38 0x0c. */
FNIEMOP_STUB(iemOp_vpermilps_Vx_Hx_Wx);
/* Opcode VEX.0F38 0x0d - invalid. */
/** Opcode VEX.66.0F38 0x0d. */
FNIEMOP_STUB(iemOp_vpermilpd_Vx_Hx_Wx);
/* Opcode VEX.0F38 0x0e - invalid. */
/** Opcode VEX.66.0F38 0x0e. */
FNIEMOP_STUB(iemOp_vtestps_Vx_Wx);
/* Opcode VEX.0F38 0x0f - invalid. */
/** Opcode VEX.66.0F38 0x0f. */
FNIEMOP_STUB(iemOp_vtestpd_Vx_Wx);


/* Opcode VEX.0F38 0x10 - invalid */
/* Opcode VEX.66.0F38 0x10 - invalid (legacy only). */
/* Opcode VEX.0F38 0x11 - invalid */
/* Opcode VEX.66.0F38 0x11 - invalid */
/* Opcode VEX.0F38 0x12 - invalid */
/* Opcode VEX.66.0F38 0x12 - invalid */
/* Opcode VEX.0F38 0x13 - invalid */
/* Opcode VEX.66.0F38 0x13 - invalid (vex only). */
/* Opcode VEX.0F38 0x14 - invalid */
/* Opcode VEX.66.0F38 0x14 - invalid (legacy only). */
/* Opcode VEX.0F38 0x15 - invalid */
/* Opcode VEX.66.0F38 0x15 - invalid (legacy only). */
/* Opcode VEX.0F38 0x16 - invalid */
/** Opcode VEX.66.0F38 0x16. */
FNIEMOP_STUB(iemOp_vpermps_Vqq_Hqq_Wqq);
/* Opcode VEX.0F38 0x17 - invalid */


/** Opcode VEX.66.0F38 0x17. */
FNIEMOP_DEF(iemOp_vptest_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(3, 2);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback),
                                     puSrc1, puSrc2, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(3, 3);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback),
                                     puSrc1, puSrc2, pEFlags);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
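
/* Note: the (v)ptest workers invoked above implement the architectural EFLAGS
   behaviour from the SDM: ZF is set when (src1 & src2) == 0, CF is set when
   (src2 & ~src1) == 0, and AF/OF/PF/SF are cleared. */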


/* Opcode VEX.0F38 0x18 - invalid */


/** Opcode VEX.66.0F38 0x18. */
FNIEMOP_DEF(iemOp_vbroadcastss_Vx_Wd)
{
    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTSS, vbroadcastss, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint32_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint32_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 3);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/* Opcode VEX.0F38 0x19 - invalid */


/** Opcode VEX.66.0F38 0x19. */
FNIEMOP_DEF(iemOp_vbroadcastsd_Vqq_Wq)
{
    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTSD, vbroadcastsd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.0F38 0x1a - invalid */


/** Opcode VEX.66.0F38 0x1a. */
FNIEMOP_DEF(iemOp_vbroadcastf128_Vqq_Mdq)
{
    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTF128, vbroadcastf128, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * No register, register.
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_BROADCAST_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.0F38 0x1b - invalid */
/* Opcode VEX.66.0F38 0x1b - invalid */
/* Opcode VEX.0F38 0x1c - invalid. */


/** Opcode VEX.66.0F38 0x1c. */
FNIEMOP_DEF(iemOp_vpabsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSB, vpabsb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x1d - invalid. */


/** Opcode VEX.66.0F38 0x1d. */
FNIEMOP_DEF(iemOp_vpabsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSW, vpabsw, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}

/* Opcode VEX.0F38 0x1e - invalid. */


/** Opcode VEX.66.0F38 0x1e. */
FNIEMOP_DEF(iemOp_vpabsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSD, vpabsd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x1f - invalid */
/* Opcode VEX.66.0F38 0x1f - invalid */


/** Body for the vpmov{s,z}x* instructions. */
#define IEMOP_BODY_VPMOV_S_Z(a_Instr, a_SrcWidth) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(2, 1); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); \
            IEM_MC_LOCAL(RTUINT256U, uDst); \
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puDst, puSrc); \
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(2, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); \
            IEM_MC_ARG(PRTUINT128U, puDst, 0); \
            IEM_MC_ARG(uint64_t, uSrc, 1); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_## a_Instr ## _u128_fallback), \
                                     puDst, uSrc); \
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(2, 3); \
            IEM_MC_LOCAL(RTUINT256U, uDst); \
            IEM_MC_LOCAL(RTUINT128U, uSrc); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puDst, puSrc); \
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(2, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG(PRTUINT128U, puDst, 0); \
            IEM_MC_ARG(uint ## a_SrcWidth ##_t, uSrc, 1); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_MEM_U ## a_SrcWidth (uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), \
                                     puDst, uSrc); \
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0

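/* Usage note for IEMOP_BODY_VPMOV_S_Z: a_Instr selects the
   iemAImpl_<a_Instr>_u128/_u256 worker pair and a_SrcWidth is the memory
   operand width in bits for the 128-bit form, e.g. the 64 passed by
   vpmovsxbw below makes it fetch a qword source. */
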
/** Opcode VEX.66.0F38 0x20. */
FNIEMOP_DEF(iemOp_vpmovsxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBW, vpmovsxbw, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbw, 64);
}


/** Opcode VEX.66.0F38 0x21. */
FNIEMOP_DEF(iemOp_vpmovsxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBD, vpmovsxbd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbd, 32);
}


/** Opcode VEX.66.0F38 0x22. */
FNIEMOP_DEF(iemOp_vpmovsxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBQ, vpmovsxbq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbq, 16);
}


/** Opcode VEX.66.0F38 0x23. */
FNIEMOP_DEF(iemOp_vpmovsxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXWD, vpmovsxwd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxwd, 64);
}


/** Opcode VEX.66.0F38 0x24. */
FNIEMOP_DEF(iemOp_vpmovsxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXWQ, vpmovsxwq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxwq, 32);
}


/** Opcode VEX.66.0F38 0x25. */
FNIEMOP_DEF(iemOp_vpmovsxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXDQ, vpmovsxdq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxdq, 64);
}


/* Opcode VEX.66.0F38 0x26 - invalid */
/* Opcode VEX.66.0F38 0x27 - invalid */


/** Opcode VEX.66.0F38 0x28. */
FNIEMOP_DEF(iemOp_vpmuldq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULDQ, vpmuldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmuldq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x29. */
FNIEMOP_DEF(iemOp_vpcmpeqq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQQ, vpcmpeqq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpcmpeqq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


FNIEMOP_DEF(iemOp_vmovntdqa_Vx_Mx)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength == 0)
        {
            /**
             * @opcode      0x2a
             * @opcodesub   !11 mr/reg vex.l=0
             * @oppfx       0x66
             * @opcpuid     avx
             * @opgroup     og_avx_cachect
             * @opxcpttype  1
             * @optest      op1=-1 op2=2  -> op1=2
             * @optest      op1=0 op2=-42 -> op1=-42
             */
            /* 128-bit: Memory, register. */
            IEMOP_MNEMONIC2EX(vmovntdqa_Vdq_WO_Mdq_L0, "vmovntdqa, Vdq_WO, Mdq", VEX_RM_MEM, VMOVNTDQA, vmovntdqa, Vx_WO, Mx,
                              DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /**
             * @opdone
             * @opcode      0x2a
             * @opcodesub   !11 mr/reg vex.l=1
             * @oppfx       0x66
             * @opcpuid     avx2
             * @opgroup     og_avx2_cachect
             * @opxcpttype  1
             * @optest      op1=-1 op2=2  -> op1=2
             * @optest      op1=0 op2=-42 -> op1=-42
             */
            /* 256-bit: Memory, register. */
            IEMOP_MNEMONIC2EX(vmovntdqa_Vqq_WO_Mqq_L1, "vmovntdqa, Vqq_WO,Mqq", VEX_RM_MEM, VMOVNTDQA, vmovntdqa, Vx_WO, Mx,
                              DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f382arg
     * @opcode      0x2a
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
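
/* Note: the non-temporal hint of vmovntdqa is not modelled above; the load is a
   plain fetch, but the aligned fetchers (IEM_MC_FETCH_MEM_U128_ALIGN_SSE /
   IEM_MC_FETCH_MEM_U256_ALIGN_AVX) preserve the architectural alignment check
   this instruction requires. */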


/** Opcode VEX.66.0F38 0x2b. */
FNIEMOP_DEF(iemOp_vpackusdw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPACKUSDW, vpackusdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpackusdw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x2c. */
FNIEMOP_STUB(iemOp_vmaskmovps_Vx_Hx_Mx);
/** Opcode VEX.66.0F38 0x2d. */
FNIEMOP_STUB(iemOp_vmaskmovpd_Vx_Hx_Mx);
/** Opcode VEX.66.0F38 0x2e. */
FNIEMOP_STUB(iemOp_vmaskmovps_Mx_Hx_Vx);
/** Opcode VEX.66.0F38 0x2f. */
FNIEMOP_STUB(iemOp_vmaskmovpd_Mx_Hx_Vx);


/** Opcode VEX.66.0F38 0x30. */
FNIEMOP_DEF(iemOp_vpmovzxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBW, vpmovzxbw, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbw, 64);
}


/** Opcode VEX.66.0F38 0x31. */
FNIEMOP_DEF(iemOp_vpmovzxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBD, vpmovzxbd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbd, 32);
}


/** Opcode VEX.66.0F38 0x32. */
FNIEMOP_DEF(iemOp_vpmovzxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBQ, vpmovzxbq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbq, 16);
}


/** Opcode VEX.66.0F38 0x33. */
FNIEMOP_DEF(iemOp_vpmovzxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXWD, vpmovzxwd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxwd, 64);
}


/** Opcode VEX.66.0F38 0x34. */
FNIEMOP_DEF(iemOp_vpmovzxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXWQ, vpmovzxwq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxwq, 32);
}


/** Opcode VEX.66.0F38 0x35. */
FNIEMOP_DEF(iemOp_vpmovzxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXDQ, vpmovzxdq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxdq, 64);
}


/* Opcode VEX.66.0F38 0x36. */
FNIEMOP_STUB(iemOp_vpermd_Vqq_Hqq_Wqq);


/** Opcode VEX.66.0F38 0x37. */
FNIEMOP_DEF(iemOp_vpcmpgtq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTQ, vpcmpgtq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpcmpgtq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x38. */
FNIEMOP_DEF(iemOp_vpminsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINSB, vpminsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpminsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x39. */
FNIEMOP_DEF(iemOp_vpminsd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINSD, vpminsd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpminsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3a. */
FNIEMOP_DEF(iemOp_vpminuw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINUW, vpminuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpminuw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3b. */
FNIEMOP_DEF(iemOp_vpminud_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINUD, vpminud, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpminud);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3c. */
FNIEMOP_DEF(iemOp_vpmaxsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSB, vpmaxsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpmaxsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3d. */
FNIEMOP_DEF(iemOp_vpmaxsd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSD, vpmaxsd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpmaxsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3e. */
FNIEMOP_DEF(iemOp_vpmaxuw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXUW, vpmaxuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpmaxuw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3f. */
FNIEMOP_DEF(iemOp_vpmaxud_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXUD, vpmaxud, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpmaxud);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x40. */
FNIEMOP_DEF(iemOp_vpmulld_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULLD, vpmulld, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmulld);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x41. */
FNIEMOP_DEF(iemOp_vphminposuw_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(VEX_RM, VPHMINPOSUW, vphminposuw, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback),
                                 puDst, puSrc);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback),
                                 puDst, puSrc);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
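
/* Note: per the SDM, (v)phminposuw reduces the eight source words to the
   minimum unsigned word value (destination word 0) and its index (low bits of
   destination word 1), zeroing the remainder; VEX.L must be 0, hence the
   IEMOPHINT_VEX_L_ZERO hint and L0 decoding helper above. */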


/* Opcode VEX.66.0F38 0x42 - invalid. */
/* Opcode VEX.66.0F38 0x43 - invalid. */
/* Opcode VEX.66.0F38 0x44 - invalid. */
/** Opcode VEX.66.0F38 0x45. */
FNIEMOP_STUB(iemOp_vpsrlvd_q_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x46. */
FNIEMOP_STUB(iemOp_vsravd_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x47. */
FNIEMOP_STUB(iemOp_vpsllvd_q_Vx_Hx_Wx);
/* Opcode VEX.66.0F38 0x48 - invalid. */
/* Opcode VEX.66.0F38 0x49 - invalid. */
/* Opcode VEX.66.0F38 0x4a - invalid. */
/* Opcode VEX.66.0F38 0x4b - invalid. */
/* Opcode VEX.66.0F38 0x4c - invalid. */
/* Opcode VEX.66.0F38 0x4d - invalid. */
/* Opcode VEX.66.0F38 0x4e - invalid. */
/* Opcode VEX.66.0F38 0x4f - invalid. */

/* Opcode VEX.66.0F38 0x50 - invalid. */
/* Opcode VEX.66.0F38 0x51 - invalid. */
/* Opcode VEX.66.0F38 0x52 - invalid. */
/* Opcode VEX.66.0F38 0x53 - invalid. */
/* Opcode VEX.66.0F38 0x54 - invalid. */
/* Opcode VEX.66.0F38 0x55 - invalid. */
/* Opcode VEX.66.0F38 0x56 - invalid. */
/* Opcode VEX.66.0F38 0x57 - invalid. */


/** Opcode VEX.66.0F38 0x58. */
FNIEMOP_DEF(iemOp_vpbroadcastd_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTD, vpbroadcastd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, uSrc);

            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, uSrc);

            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 3);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode VEX.66.0F38 0x59. */
FNIEMOP_DEF(iemOp_vpbroadcastq_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTQ, vpbroadcastq, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 3);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode VEX.66.0F38 0x5a. */
FNIEMOP_DEF(iemOp_vbroadcasti128_Vqq_Mdq)
{
    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTI128, vbroadcasti128, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * No register, register.
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_BROADCAST_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.66.0F38 0x5b - invalid. */
/* Opcode VEX.66.0F38 0x5c - invalid. */
/* Opcode VEX.66.0F38 0x5d - invalid. */
/* Opcode VEX.66.0F38 0x5e - invalid. */
/* Opcode VEX.66.0F38 0x5f - invalid. */

/* Opcode VEX.66.0F38 0x60 - invalid. */
/* Opcode VEX.66.0F38 0x61 - invalid. */
/* Opcode VEX.66.0F38 0x62 - invalid. */
/* Opcode VEX.66.0F38 0x63 - invalid. */
/* Opcode VEX.66.0F38 0x64 - invalid. */
/* Opcode VEX.66.0F38 0x65 - invalid. */
/* Opcode VEX.66.0F38 0x66 - invalid. */
/* Opcode VEX.66.0F38 0x67 - invalid. */
/* Opcode VEX.66.0F38 0x68 - invalid. */
/* Opcode VEX.66.0F38 0x69 - invalid. */
/* Opcode VEX.66.0F38 0x6a - invalid. */
/* Opcode VEX.66.0F38 0x6b - invalid. */
/* Opcode VEX.66.0F38 0x6c - invalid. */
/* Opcode VEX.66.0F38 0x6d - invalid. */
/* Opcode VEX.66.0F38 0x6e - invalid. */
/* Opcode VEX.66.0F38 0x6f - invalid. */

/* Opcode VEX.66.0F38 0x70 - invalid. */
/* Opcode VEX.66.0F38 0x71 - invalid. */
/* Opcode VEX.66.0F38 0x72 - invalid. */
/* Opcode VEX.66.0F38 0x73 - invalid. */
/* Opcode VEX.66.0F38 0x74 - invalid. */
/* Opcode VEX.66.0F38 0x75 - invalid. */
/* Opcode VEX.66.0F38 0x76 - invalid. */
/* Opcode VEX.66.0F38 0x77 - invalid. */


/** Opcode VEX.66.0F38 0x78. */
FNIEMOP_DEF(iemOp_vpbroadcastb_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTB, vpbroadcastb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint8_t, uSrc);

            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_YREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint8_t, uSrc);

            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_XREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_XREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint8_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_YREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 3);
            IEM_MC_LOCAL(uint8_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_XREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode VEX.66.0F38 0x79. */
FNIEMOP_DEF(iemOp_vpbroadcastw_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTW, vpbroadcastw, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, uSrc);

            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_YREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, uSrc);

            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_XREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_XREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_YREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 3);
            IEM_MC_LOCAL(uint16_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_XREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/* Opcode VEX.66.0F38 0x7a - invalid. */
/* Opcode VEX.66.0F38 0x7b - invalid. */
/* Opcode VEX.66.0F38 0x7c - invalid. */
/* Opcode VEX.66.0F38 0x7d - invalid. */
/* Opcode VEX.66.0F38 0x7e - invalid. */
/* Opcode VEX.66.0F38 0x7f - invalid. */

/* Opcode VEX.66.0F38 0x80 - invalid (legacy only). */
/* Opcode VEX.66.0F38 0x81 - invalid (legacy only). */
/* Opcode VEX.66.0F38 0x82 - invalid (legacy only). */
/* Opcode VEX.66.0F38 0x83 - invalid. */
/* Opcode VEX.66.0F38 0x84 - invalid. */
/* Opcode VEX.66.0F38 0x85 - invalid. */
/* Opcode VEX.66.0F38 0x86 - invalid. */
/* Opcode VEX.66.0F38 0x87 - invalid. */
/* Opcode VEX.66.0F38 0x88 - invalid. */
/* Opcode VEX.66.0F38 0x89 - invalid. */
/* Opcode VEX.66.0F38 0x8a - invalid. */
/* Opcode VEX.66.0F38 0x8b - invalid. */
/** Opcode VEX.66.0F38 0x8c. */
FNIEMOP_STUB(iemOp_vpmaskmovd_q_Vx_Hx_Mx);
/* Opcode VEX.66.0F38 0x8d - invalid. */
/** Opcode VEX.66.0F38 0x8e. */
FNIEMOP_STUB(iemOp_vpmaskmovd_q_Mx_Vx_Hx);
/* Opcode VEX.66.0F38 0x8f - invalid. */

/** Opcode VEX.66.0F38 0x90 (vex only). */
FNIEMOP_STUB(iemOp_vgatherdd_q_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x91 (vex only). */
FNIEMOP_STUB(iemOp_vgatherqd_q_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x92 (vex only). */
FNIEMOP_STUB(iemOp_vgatherdps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x93 (vex only). */
FNIEMOP_STUB(iemOp_vgatherqps_d_Vx_Hx_Wx);
/* Opcode VEX.66.0F38 0x94 - invalid. */
/* Opcode VEX.66.0F38 0x95 - invalid. */
/** Opcode VEX.66.0F38 0x96 (vex only). */
FNIEMOP_STUB(iemOp_vfmaddsub132ps_q_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x97 (vex only). */
FNIEMOP_STUB(iemOp_vfmsubadd132ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x98 (vex only). */
FNIEMOP_STUB(iemOp_vfmadd132ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x99 (vex only). */
FNIEMOP_STUB(iemOp_vfmadd132ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x9a (vex only). */
FNIEMOP_STUB(iemOp_vfmsub132ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x9b (vex only). */
FNIEMOP_STUB(iemOp_vfmsub132ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x9c (vex only). */
FNIEMOP_STUB(iemOp_vfnmadd132ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x9d (vex only). */
FNIEMOP_STUB(iemOp_vfnmadd132ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x9e (vex only). */
FNIEMOP_STUB(iemOp_vfnmsub132ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0x9f (vex only). */
FNIEMOP_STUB(iemOp_vfnmsub132ss_d_Vx_Hx_Wx);

/* Opcode VEX.66.0F38 0xa0 - invalid. */
/* Opcode VEX.66.0F38 0xa1 - invalid. */
/* Opcode VEX.66.0F38 0xa2 - invalid. */
/* Opcode VEX.66.0F38 0xa3 - invalid. */
/* Opcode VEX.66.0F38 0xa4 - invalid. */
/* Opcode VEX.66.0F38 0xa5 - invalid. */
/** Opcode VEX.66.0F38 0xa6 (vex only). */
FNIEMOP_STUB(iemOp_vfmaddsub213ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xa7 (vex only). */
FNIEMOP_STUB(iemOp_vfmsubadd213ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xa8 (vex only). */
FNIEMOP_STUB(iemOp_vfmadd213ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xa9 (vex only). */
FNIEMOP_STUB(iemOp_vfmadd213ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xaa (vex only). */
FNIEMOP_STUB(iemOp_vfmsub213ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xab (vex only). */
FNIEMOP_STUB(iemOp_vfmsub213ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xac (vex only). */
FNIEMOP_STUB(iemOp_vfnmadd213ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xad (vex only). */
FNIEMOP_STUB(iemOp_vfnmadd213ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xae (vex only). */
FNIEMOP_STUB(iemOp_vfnmsub213ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xaf (vex only). */
FNIEMOP_STUB(iemOp_vfnmsub213ss_d_Vx_Hx_Wx);

/* Opcode VEX.66.0F38 0xb0 - invalid. */
/* Opcode VEX.66.0F38 0xb1 - invalid. */
/* Opcode VEX.66.0F38 0xb2 - invalid. */
/* Opcode VEX.66.0F38 0xb3 - invalid. */
/* Opcode VEX.66.0F38 0xb4 - invalid. */
/* Opcode VEX.66.0F38 0xb5 - invalid. */
/** Opcode VEX.66.0F38 0xb6 (vex only). */
FNIEMOP_STUB(iemOp_vfmaddsub231ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xb7 (vex only). */
FNIEMOP_STUB(iemOp_vfmsubadd231ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xb8 (vex only). */
FNIEMOP_STUB(iemOp_vfmadd231ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xb9 (vex only). */
FNIEMOP_STUB(iemOp_vfmadd231ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xba (vex only). */
FNIEMOP_STUB(iemOp_vfmsub231ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xbb (vex only). */
FNIEMOP_STUB(iemOp_vfmsub231ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xbc (vex only). */
FNIEMOP_STUB(iemOp_vfnmadd231ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xbd (vex only). */
FNIEMOP_STUB(iemOp_vfnmadd231ss_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xbe (vex only). */
FNIEMOP_STUB(iemOp_vfnmsub231ps_d_Vx_Hx_Wx);
/** Opcode VEX.66.0F38 0xbf (vex only). */
FNIEMOP_STUB(iemOp_vfnmsub231ss_d_Vx_Hx_Wx);

/* Opcode VEX.0F38 0xc0 - invalid. */
/* Opcode VEX.66.0F38 0xc0 - invalid. */
/* Opcode VEX.0F38 0xc1 - invalid. */
/* Opcode VEX.66.0F38 0xc1 - invalid. */
/* Opcode VEX.0F38 0xc2 - invalid. */
/* Opcode VEX.66.0F38 0xc2 - invalid. */
/* Opcode VEX.0F38 0xc3 - invalid. */
/* Opcode VEX.66.0F38 0xc3 - invalid. */
/* Opcode VEX.0F38 0xc4 - invalid. */
/* Opcode VEX.66.0F38 0xc4 - invalid. */
/* Opcode VEX.0F38 0xc5 - invalid. */
/* Opcode VEX.66.0F38 0xc5 - invalid. */
/* Opcode VEX.0F38 0xc6 - invalid. */
/* Opcode VEX.66.0F38 0xc6 - invalid. */
/* Opcode VEX.0F38 0xc7 - invalid. */
/* Opcode VEX.66.0F38 0xc7 - invalid. */
/* Opcode VEX.0F38 0xc8 - invalid. */
/* Opcode VEX.66.0F38 0xc8 - invalid. */
/* Opcode VEX.0F38 0xc9 - invalid. */
/* Opcode VEX.66.0F38 0xc9 - invalid. */
/* Opcode VEX.0F38 0xca - invalid. */
/* Opcode VEX.66.0F38 0xca - invalid. */
/* Opcode VEX.0F38 0xcb - invalid. */
/* Opcode VEX.66.0F38 0xcb - invalid. */
/* Opcode VEX.0F38 0xcc - invalid. */
/* Opcode VEX.66.0F38 0xcc - invalid. */
/* Opcode VEX.0F38 0xcd - invalid. */
/* Opcode VEX.66.0F38 0xcd - invalid. */
/* Opcode VEX.0F38 0xce - invalid. */
/* Opcode VEX.66.0F38 0xce - invalid. */
/* Opcode VEX.0F38 0xcf - invalid. */
/* Opcode VEX.66.0F38 0xcf - invalid. */

/* Opcode VEX.66.0F38 0xd0 - invalid. */
/* Opcode VEX.66.0F38 0xd1 - invalid. */
/* Opcode VEX.66.0F38 0xd2 - invalid. */
/* Opcode VEX.66.0F38 0xd3 - invalid. */
/* Opcode VEX.66.0F38 0xd4 - invalid. */
/* Opcode VEX.66.0F38 0xd5 - invalid. */
/* Opcode VEX.66.0F38 0xd6 - invalid. */
/* Opcode VEX.66.0F38 0xd7 - invalid. */
/* Opcode VEX.66.0F38 0xd8 - invalid. */
/* Opcode VEX.66.0F38 0xd9 - invalid. */
/* Opcode VEX.66.0F38 0xda - invalid. */
/** Opcode VEX.66.0F38 0xdb. */
FNIEMOP_STUB(iemOp_vaesimc_Vdq_Wdq);
/** Opcode VEX.66.0F38 0xdc. */
FNIEMOP_STUB(iemOp_vaesenc_Vdq_Wdq);
/** Opcode VEX.66.0F38 0xdd. */
FNIEMOP_STUB(iemOp_vaesenclast_Vdq_Wdq);
/** Opcode VEX.66.0F38 0xde. */
FNIEMOP_STUB(iemOp_vaesdec_Vdq_Wdq);
/** Opcode VEX.66.0F38 0xdf. */
FNIEMOP_STUB(iemOp_vaesdeclast_Vdq_Wdq);

/* Opcode VEX.66.0F38 0xe0 - invalid. */
/* Opcode VEX.66.0F38 0xe1 - invalid. */
/* Opcode VEX.66.0F38 0xe2 - invalid. */
/* Opcode VEX.66.0F38 0xe3 - invalid. */
/* Opcode VEX.66.0F38 0xe4 - invalid. */
/* Opcode VEX.66.0F38 0xe5 - invalid. */
/* Opcode VEX.66.0F38 0xe6 - invalid. */
/* Opcode VEX.66.0F38 0xe7 - invalid. */
/* Opcode VEX.66.0F38 0xe8 - invalid. */
/* Opcode VEX.66.0F38 0xe9 - invalid. */
/* Opcode VEX.66.0F38 0xea - invalid. */
/* Opcode VEX.66.0F38 0xeb - invalid. */
/* Opcode VEX.66.0F38 0xec - invalid. */
/* Opcode VEX.66.0F38 0xed - invalid. */
/* Opcode VEX.66.0F38 0xee - invalid. */
/* Opcode VEX.66.0F38 0xef - invalid. */


/* Opcode VEX.0F38 0xf0 - invalid (legacy only). */
/* Opcode VEX.66.0F38 0xf0 - invalid (legacy only). */
/* Opcode VEX.F3.0F38 0xf0 - invalid. */
/* Opcode VEX.F2.0F38 0xf0 - invalid (legacy only). */

/* Opcode VEX.0F38 0xf1 - invalid (legacy only). */
/* Opcode VEX.66.0F38 0xf1 - invalid (legacy only). */
/* Opcode VEX.F3.0F38 0xf1 - invalid. */
/* Opcode VEX.F2.0F38 0xf1 - invalid (legacy only). */

/** Opcode VEX.0F38 0xf2 - ANDN (vex only). */
FNIEMOP_DEF(iemOp_andn_Gy_By_Ey)
{
    IEMOP_MNEMONIC3(VEX_RVM, ANDN, andn, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_BEGIN(4, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
            IEM_MC_ARG(uint64_t *, pDst, 0);
            IEM_MC_ARG(uint64_t, uSrc1, 1);
            IEM_MC_ARG(uint64_t, uSrc2, 2);
            IEM_MC_ARG(uint32_t *, pEFlags, 3);
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u64, iemAImpl_andn_u64_fallback),
                                     pDst, uSrc1, uSrc2, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
            IEM_MC_ARG(uint32_t *, pDst, 0);
            IEM_MC_ARG(uint32_t, uSrc1, 1);
            IEM_MC_ARG(uint32_t, uSrc2, 2);
            IEM_MC_ARG(uint32_t *, pEFlags, 3);
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u32, iemAImpl_andn_u32_fallback),
                                     pDst, uSrc1, uSrc2, pEFlags);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_BEGIN(4, 1);
            IEM_MC_ARG(uint64_t *, pDst, 0);
            IEM_MC_ARG(uint64_t, uSrc1, 1);
            IEM_MC_ARG(uint64_t, uSrc2, 2);
            IEM_MC_ARG(uint32_t *, pEFlags, 3);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
            IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u64, iemAImpl_andn_u64_fallback),
                                     pDst, uSrc1, uSrc2, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 1);
            IEM_MC_ARG(uint32_t *, pDst, 0);
            IEM_MC_ARG(uint32_t, uSrc1, 1);
            IEM_MC_ARG(uint32_t, uSrc2, 2);
            IEM_MC_ARG(uint32_t *, pEFlags, 3);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
            IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u32, iemAImpl_andn_u32_fallback),
                                     pDst, uSrc1, uSrc2, pEFlags);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
1727
1728/* Opcode VEX.66.0F38 0xf2 - invalid. */
1729/* Opcode VEX.F3.0F38 0xf2 - invalid. */
1730/* Opcode VEX.F2.0F38 0xf2 - invalid. */
1731
1732
1733/* Opcode VEX.0F38 0xf3 - invalid. */
1734/* Opcode VEX.66.0F38 0xf3 - invalid. */
1735
1736/* Opcode VEX.F3.0F38 0xf3 /0 - invalid. */
1737
1738/** Body for the vex group 17 instructions. */
1739#define IEMOP_BODY_By_Ey(a_Instr) \
1740 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF); \
1741 if (IEM_IS_MODRM_REG_MODE(bRm)) \
1742 { \
1743 /* \
1744 * Register, register. \
1745 */ \
1746 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
1747 { \
1748 IEM_MC_BEGIN(3, 0); \
1749 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
1750 IEM_MC_ARG(uint64_t *, pDst, 0); \
1751 IEM_MC_ARG(uint64_t, uSrc, 1); \
1752 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
1753 IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1754 IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
1755 IEM_MC_REF_EFLAGS(pEFlags); \
1756 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
1757 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc, pEFlags); \
1758 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1759 IEM_MC_END(); \
1760 } \
1761 else \
1762 { \
1763 IEM_MC_BEGIN(3, 0); \
1764 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
1765 IEM_MC_ARG(uint32_t *, pDst, 0); \
1766 IEM_MC_ARG(uint32_t, uSrc, 1); \
1767 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
1768 IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1769 IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
1770 IEM_MC_REF_EFLAGS(pEFlags); \
1771 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
1772 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc, pEFlags); \
1773 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
1774 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1775 IEM_MC_END(); \
1776 } \
1777 } \
1778 else \
1779 { \
1780 /* \
1781 * Register, memory. \
1782 */ \
1783 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
1784 { \
1785 IEM_MC_BEGIN(3, 1); \
1786 IEM_MC_ARG(uint64_t *, pDst, 0); \
1787 IEM_MC_ARG(uint64_t, uSrc, 1); \
1788 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
1789 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1790 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1791 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
1792 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1793 IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1794 IEM_MC_REF_EFLAGS(pEFlags); \
1795 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
1796 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc, pEFlags); \
1797 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1798 IEM_MC_END(); \
1799 } \
1800 else \
1801 { \
1802 IEM_MC_BEGIN(3, 1); \
1803 IEM_MC_ARG(uint32_t *, pDst, 0); \
1804 IEM_MC_ARG(uint32_t, uSrc, 1); \
1805 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
1806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1808 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
1809 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
1810 IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
1811 IEM_MC_REF_EFLAGS(pEFlags); \
1812 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
1813 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc, pEFlags); \
1814 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
1815 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1816 IEM_MC_END(); \
1817 } \
1818 } \
1819 (void)0
1820
1821
1822/* Opcode VEX.F3.0F38 0xf3 /1. */
1823/** @opcode /1
1824 * @opmaps vexgrp17 */
1825FNIEMOP_DEF_1(iemOp_VGrp17_blsr_By_Ey, uint8_t, bRm)
1826{
1827 IEMOP_MNEMONIC2(VEX_VM, BLSR, blsr, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
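    /* BLSR: dst = src & (src - 1), i.e. reset the lowest set bit
       (e.g. blsr(1010b) = 1000b). */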
    IEMOP_BODY_By_Ey(blsr);
}


/* Opcode VEX.F3.0F38 0xf3 /2. */
/** @opcode /2
 * @opmaps vexgrp17 */
FNIEMOP_DEF_1(iemOp_VGrp17_blsmsk_By_Ey, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(VEX_VM, BLSMSK, blsmsk, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
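    /* BLSMSK: dst = src ^ (src - 1), i.e. a mask up to and including the
       lowest set bit (e.g. blsmsk(1010b) = 0011b). */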
    IEMOP_BODY_By_Ey(blsmsk);
}


/* Opcode VEX.F3.0F38 0xf3 /3. */
/** @opcode /3
 * @opmaps vexgrp17 */
FNIEMOP_DEF_1(iemOp_VGrp17_blsi_By_Ey, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(VEX_VM, BLSI, blsi, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
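    /* BLSI: dst = src & -src, i.e. isolate the lowest set bit
       (e.g. blsi(1010b) = 0010b). */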
    IEMOP_BODY_By_Ey(blsi);
}


/* Opcode VEX.F3.0F38 0xf3 /4 - invalid. */
/* Opcode VEX.F3.0F38 0xf3 /5 - invalid. */
/* Opcode VEX.F3.0F38 0xf3 /6 - invalid. */
/* Opcode VEX.F3.0F38 0xf3 /7 - invalid. */

/**
 * Group 17 jump table for the VEX.F3 variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup17_f3[] =
{
    /* /0 */ iemOp_InvalidWithRM,
    /* /1 */ iemOp_VGrp17_blsr_By_Ey,
    /* /2 */ iemOp_VGrp17_blsmsk_By_Ey,
    /* /3 */ iemOp_VGrp17_blsi_By_Ey,
    /* /4 */ iemOp_InvalidWithRM,
    /* /5 */ iemOp_InvalidWithRM,
    /* /6 */ iemOp_InvalidWithRM,
    /* /7 */ iemOp_InvalidWithRM
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup17_f3) == 8);

/** Opcode VEX.F3.0F38 0xf3 - invalid (vex only - group 17). */
FNIEMOP_DEF(iemOp_VGrp17_f3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnVexGroup17_f3[IEM_GET_MODRM_REG_8(bRm)], bRm);
}

/* Opcode VEX.F2.0F38 0xf3 - invalid (vex only - group 17). */


/* Opcode VEX.0F38 0xf4 - invalid. */
/* Opcode VEX.66.0F38 0xf4 - invalid. */
/* Opcode VEX.F3.0F38 0xf4 - invalid. */
/* Opcode VEX.F2.0F38 0xf4 - invalid. */

/** Body for BZHI, BEXTR, ++; assumes VEX.L must be 0. */
#define IEMOP_BODY_Gy_Ey_By(a_Instr, a_fFeatureMember, a_fUndefFlags) \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fUndefFlags); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            IEM_MC_BEGIN(4, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
            IEM_MC_ARG(uint64_t *, pDst, 0); \
            IEM_MC_ARG(uint64_t, uSrc1, 1); \
            IEM_MC_ARG(uint64_t, uSrc2, 2); \
            IEM_MC_ARG(uint32_t *, pEFlags, 3); \
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
                                                                 iemAImpl_ ## a_Instr ## _u64_fallback), \
                                     pDst, uSrc1, uSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(4, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
            IEM_MC_ARG(uint32_t *, pDst, 0); \
            IEM_MC_ARG(uint32_t, uSrc1, 1); \
            IEM_MC_ARG(uint32_t, uSrc2, 2); \
            IEM_MC_ARG(uint32_t *, pEFlags, 3); \
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
                                                                 iemAImpl_ ## a_Instr ## _u32_fallback), \
                                     pDst, uSrc1, uSrc2, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            IEM_MC_BEGIN(4, 1); \
            IEM_MC_ARG(uint64_t *, pDst, 0); \
            IEM_MC_ARG(uint64_t, uSrc1, 1); \
            IEM_MC_ARG(uint64_t, uSrc2, 2); \
            IEM_MC_ARG(uint32_t *, pEFlags, 3); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
            IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
                                                                 iemAImpl_ ## a_Instr ## _u64_fallback), \
                                     pDst, uSrc1, uSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(4, 1); \
            IEM_MC_ARG(uint32_t *, pDst, 0); \
            IEM_MC_ARG(uint32_t, uSrc1, 1); \
            IEM_MC_ARG(uint32_t, uSrc2, 2); \
            IEM_MC_ARG(uint32_t *, pEFlags, 3); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
            IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
                                                                 iemAImpl_ ## a_Instr ## _u32_fallback), \
                                     pDst, uSrc1, uSrc2, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0

/** Body for SARX, SHLX, SHRX; assumes VEX.L must be 0. */
#define IEMOP_BODY_Gy_Ey_By_NoEflags(a_Instr, a_fFeatureMember, a_fUndefFlags) \
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fUndefFlags); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
            IEM_MC_ARG(uint64_t *, pDst, 0); \
            IEM_MC_ARG(uint64_t, uSrc1, 1); \
            IEM_MC_ARG(uint64_t, uSrc2, 2); \
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
                                                                 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
            IEM_MC_ARG(uint32_t *, pDst, 0); \
            IEM_MC_ARG(uint32_t, uSrc1, 1); \
            IEM_MC_ARG(uint32_t, uSrc2, 2); \
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
                                                                 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint64_t *, pDst, 0); \
            IEM_MC_ARG(uint64_t, uSrc1, 1); \
            IEM_MC_ARG(uint64_t, uSrc2, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
            IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
                                                                 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint32_t *, pDst, 0); \
            IEM_MC_ARG(uint32_t, uSrc1, 1); \
            IEM_MC_ARG(uint32_t, uSrc2, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
            IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
                                                                 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0

/** Opcode VEX.0F38 0xf5 (vex only). */
FNIEMOP_DEF(iemOp_bzhi_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, BZHI, bzhi, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
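    /* BZHI: dst = src with all bits at position N and up zeroed, where N is
       the low 8 bits of the vvvv register (e.g. bzhi(0ffffh, 4) = 0fh). */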
    IEMOP_BODY_Gy_Ey_By(bzhi, fBmi2, X86_EFL_AF | X86_EFL_PF);
}

/* Opcode VEX.66.0F38 0xf5 - invalid. */

/** Body for PDEP and PEXT (similar to ANDN, except no EFLAGS). */
#define IEMOP_BODY_Gy_By_Ey_NoEflags(a_Instr, a_fFeatureMember) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
            IEM_MC_ARG(uint64_t *, pDst, 0); \
            IEM_MC_ARG(uint64_t, uSrc1, 1); \
            IEM_MC_ARG(uint64_t, uSrc2, 2); \
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
                                                                 iemAImpl_ ## a_Instr ## _u64, \
                                                                 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
            IEM_MC_ARG(uint32_t *, pDst, 0); \
            IEM_MC_ARG(uint32_t, uSrc1, 1); \
            IEM_MC_ARG(uint32_t, uSrc2, 2); \
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
                                                                 iemAImpl_ ## a_Instr ## _u32, \
                                                                 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint64_t *, pDst, 0); \
            IEM_MC_ARG(uint64_t, uSrc1, 1); \
            IEM_MC_ARG(uint64_t, uSrc2, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
            IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
                                                                 iemAImpl_ ## a_Instr ## _u64, \
                                                                 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint32_t *, pDst, 0); \
            IEM_MC_ARG(uint32_t, uSrc1, 1); \
            IEM_MC_ARG(uint32_t, uSrc2, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
            IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
                                                                 iemAImpl_ ## a_Instr ## _u32, \
                                                                 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0


/** Opcode VEX.F3.0F38 0xf5 (vex only). */
FNIEMOP_DEF(iemOp_pext_Gy_By_Ey)
{
    IEMOP_MNEMONIC3(VEX_RVM, PEXT, pext, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
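    /* PEXT: gather the src1 bits selected by the set bits of the src2 mask
       into the low bits of dst (e.g. pext(1101b, 1010b) = 10b). */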
    IEMOP_BODY_Gy_By_Ey_NoEflags(pext, fBmi2);
}


/** Opcode VEX.F2.0F38 0xf5 (vex only). */
FNIEMOP_DEF(iemOp_pdep_Gy_By_Ey)
{
    IEMOP_MNEMONIC3(VEX_RVM, PDEP, pdep, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
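    /* PDEP: scatter the low bits of src1 to the positions of the set bits in
       the src2 mask (e.g. pdep(11b, 1010b) = 1010b). */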
    IEMOP_BODY_Gy_By_Ey_NoEflags(pdep, fBmi2);
}


/* Opcode VEX.0F38 0xf6 - invalid. */
/* Opcode VEX.66.0F38 0xf6 - invalid (legacy only). */
/* Opcode VEX.F3.0F38 0xf6 - invalid (legacy only). */


/** Opcode VEX.F2.0F38 0xf6 (vex only) */
FNIEMOP_DEF(iemOp_mulx_By_Gy_rDX_Ey)
{
    IEMOP_MNEMONIC4(VEX_RVM, MULX, mulx, Gy, By, Ey, rDX, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
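    /* MULX: full-width unsigned multiply of rDX by r/m (64x64->128 or
       32x32->64); per the SDM the ModRM.reg operand (pDst1) receives the high
       half and vvvv (pDst2) the low half. EFLAGS are not modified, which is
       why there is no pEFlags argument below. */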
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_BEGIN(4, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
            IEM_MC_ARG(uint64_t *, pDst1, 0);
            IEM_MC_ARG(uint64_t *, pDst2, 1);
            IEM_MC_ARG(uint64_t, uSrc1, 2);
            IEM_MC_ARG(uint64_t, uSrc2, 3);
            IEM_MC_REF_GREG_U64(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
                                     pDst1, pDst2, uSrc1, uSrc2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
            IEM_MC_ARG(uint32_t *, pDst1, 0);
            IEM_MC_ARG(uint32_t *, pDst2, 1);
            IEM_MC_ARG(uint32_t, uSrc1, 2);
            IEM_MC_ARG(uint32_t, uSrc2, 3);
            IEM_MC_REF_GREG_U32(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U32(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
                                     pDst1, pDst2, uSrc1, uSrc2);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst2);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst1);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_BEGIN(4, 1);
            IEM_MC_ARG(uint64_t *, pDst1, 0);
            IEM_MC_ARG(uint64_t *, pDst2, 1);
            IEM_MC_ARG(uint64_t, uSrc1, 2);
            IEM_MC_ARG(uint64_t, uSrc2, 3);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
            IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
            IEM_MC_REF_GREG_U64(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_GREG_U64(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
                                     pDst1, pDst2, uSrc1, uSrc2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 1);
            IEM_MC_ARG(uint32_t *, pDst1, 0);
            IEM_MC_ARG(uint32_t *, pDst2, 1);
            IEM_MC_ARG(uint32_t, uSrc1, 2);
            IEM_MC_ARG(uint32_t, uSrc2, 3);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
            IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
            IEM_MC_REF_GREG_U32(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_GREG_U32(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
                                     pDst1, pDst2, uSrc1, uSrc2);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst2);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst1);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode VEX.0F38 0xf7 (vex only). */
FNIEMOP_DEF(iemOp_bextr_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, BEXTR, bextr, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
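    /* BEXTR: dst = (src >> start) & ((1 << len) - 1), with start and len
       taken from bits 7:0 and 15:8 of the vvvv register; SF, AF and PF are
       undefined per the mask passed to the body macro below. */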
    IEMOP_BODY_Gy_Ey_By(bextr, fBmi1, X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
}


/** Opcode VEX.66.0F38 0xf7 (vex only). */
FNIEMOP_DEF(iemOp_shlx_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, SHLX, shlx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
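    /* SHLX: dst = src << count, count from the vvvv register (masked to 5 or
       6 bits depending on operand size); EFLAGS are left untouched. */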
    IEMOP_BODY_Gy_Ey_By_NoEflags(shlx, fBmi2, 0);
}


/** Opcode VEX.F3.0F38 0xf7 (vex only). */
FNIEMOP_DEF(iemOp_sarx_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, SARX, sarx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
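    /* SARX: dst = src >> count (arithmetic, sign-filling), count from the
       vvvv register; EFLAGS are left untouched. */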
    IEMOP_BODY_Gy_Ey_By_NoEflags(sarx, fBmi2, 0);
}


/** Opcode VEX.F2.0F38 0xf7 (vex only). */
FNIEMOP_DEF(iemOp_shrx_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, SHRX, shrx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
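    /* SHRX: dst = src >> count (logical, zero-filling), count from the
       vvvv register; EFLAGS are left untouched. */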
    IEMOP_BODY_Gy_Ey_By_NoEflags(shrx, fBmi2, 0);
}

/* Opcode VEX.0F38 0xf8 - invalid. */
/* Opcode VEX.66.0F38 0xf8 - invalid. */
/* Opcode VEX.F3.0F38 0xf8 - invalid. */
/* Opcode VEX.F2.0F38 0xf8 - invalid. */

/* Opcode VEX.0F38 0xf9 - invalid. */
/* Opcode VEX.66.0F38 0xf9 - invalid. */
/* Opcode VEX.F3.0F38 0xf9 - invalid. */
/* Opcode VEX.F2.0F38 0xf9 - invalid. */

/* Opcode VEX.0F38 0xfa - invalid. */
/* Opcode VEX.66.0F38 0xfa - invalid. */
/* Opcode VEX.F3.0F38 0xfa - invalid. */
/* Opcode VEX.F2.0F38 0xfa - invalid. */

/* Opcode VEX.0F38 0xfb - invalid. */
/* Opcode VEX.66.0F38 0xfb - invalid. */
/* Opcode VEX.F3.0F38 0xfb - invalid. */
/* Opcode VEX.F2.0F38 0xfb - invalid. */

/* Opcode VEX.0F38 0xfc - invalid. */
/* Opcode VEX.66.0F38 0xfc - invalid. */
/* Opcode VEX.F3.0F38 0xfc - invalid. */
/* Opcode VEX.F2.0F38 0xfc - invalid. */

/* Opcode VEX.0F38 0xfd - invalid. */
/* Opcode VEX.66.0F38 0xfd - invalid. */
/* Opcode VEX.F3.0F38 0xfd - invalid. */
/* Opcode VEX.F2.0F38 0xfd - invalid. */

/* Opcode VEX.0F38 0xfe - invalid. */
/* Opcode VEX.66.0F38 0xfe - invalid. */
/* Opcode VEX.F3.0F38 0xfe - invalid. */
/* Opcode VEX.F2.0F38 0xfe - invalid. */

/* Opcode VEX.0F38 0xff - invalid. */
/* Opcode VEX.66.0F38 0xff - invalid. */
/* Opcode VEX.F3.0F38 0xff - invalid. */
/* Opcode VEX.F2.0F38 0xff - invalid. */


/**
 * VEX opcode map \#2.
 *
 * @sa g_apfnThreeByte0f38
 */
IEM_STATIC const PFNIEMOP g_apfnVexMap2[] =
{
    /*          no prefix,                  066h prefix,                      f3h prefix,           f2h prefix */
    /* 0x00 */  iemOp_InvalidNeedRM,        iemOp_vpshufb_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x01 */  iemOp_InvalidNeedRM,        iemOp_vphaddw_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x02 */  iemOp_InvalidNeedRM,        iemOp_vphaddd_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x03 */  iemOp_InvalidNeedRM,        iemOp_vphaddsw_Vx_Hx_Wx,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x04 */  iemOp_InvalidNeedRM,        iemOp_vpmaddubsw_Vx_Hx_Wx,        iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x05 */  iemOp_InvalidNeedRM,        iemOp_vphsubw_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x06 */  iemOp_InvalidNeedRM,        iemOp_vphsubd_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x07 */  iemOp_InvalidNeedRM,        iemOp_vphsubsw_Vx_Hx_Wx,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x08 */  iemOp_InvalidNeedRM,        iemOp_vpsignb_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x09 */  iemOp_InvalidNeedRM,        iemOp_vpsignw_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x0a */  iemOp_InvalidNeedRM,        iemOp_vpsignd_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x0b */  iemOp_InvalidNeedRM,        iemOp_vpmulhrsw_Vx_Hx_Wx,         iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x0c */  iemOp_InvalidNeedRM,        iemOp_vpermilps_Vx_Hx_Wx,         iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x0d */  iemOp_InvalidNeedRM,        iemOp_vpermilpd_Vx_Hx_Wx,         iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x0e */  iemOp_InvalidNeedRM,        iemOp_vtestps_Vx_Wx,              iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x0f */  iemOp_InvalidNeedRM,        iemOp_vtestpd_Vx_Wx,              iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,

    /* 0x10 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x11 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x12 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x13 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x14 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x15 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x16 */  iemOp_InvalidNeedRM,        iemOp_vpermps_Vqq_Hqq_Wqq,        iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x17 */  iemOp_InvalidNeedRM,        iemOp_vptest_Vx_Wx,               iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x18 */  iemOp_InvalidNeedRM,        iemOp_vbroadcastss_Vx_Wd,         iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x19 */  iemOp_InvalidNeedRM,        iemOp_vbroadcastsd_Vqq_Wq,        iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x1a */  iemOp_InvalidNeedRM,        iemOp_vbroadcastf128_Vqq_Mdq,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x1b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */  iemOp_InvalidNeedRM,        iemOp_vpabsb_Vx_Wx,               iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x1d */  iemOp_InvalidNeedRM,        iemOp_vpabsw_Vx_Wx,               iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x1e */  iemOp_InvalidNeedRM,        iemOp_vpabsd_Vx_Wx,               iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x1f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */  iemOp_InvalidNeedRM,        iemOp_vpmovsxbw_Vx_UxMq,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x21 */  iemOp_InvalidNeedRM,        iemOp_vpmovsxbd_Vx_UxMd,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x22 */  iemOp_InvalidNeedRM,        iemOp_vpmovsxbq_Vx_UxMw,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x23 */  iemOp_InvalidNeedRM,        iemOp_vpmovsxwd_Vx_UxMq,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x24 */  iemOp_InvalidNeedRM,        iemOp_vpmovsxwq_Vx_UxMd,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x25 */  iemOp_InvalidNeedRM,        iemOp_vpmovsxdq_Vx_UxMq,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x26 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */  iemOp_InvalidNeedRM,        iemOp_vpmuldq_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x29 */  iemOp_InvalidNeedRM,        iemOp_vpcmpeqq_Vx_Hx_Wx,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x2a */  iemOp_InvalidNeedRM,        iemOp_vmovntdqa_Vx_Mx,            iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x2b */  iemOp_InvalidNeedRM,        iemOp_vpackusdw_Vx_Hx_Wx,         iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x2c */  iemOp_InvalidNeedRM,        iemOp_vmaskmovps_Vx_Hx_Mx,        iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x2d */  iemOp_InvalidNeedRM,        iemOp_vmaskmovpd_Vx_Hx_Mx,        iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x2e */  iemOp_InvalidNeedRM,        iemOp_vmaskmovps_Mx_Hx_Vx,        iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x2f */  iemOp_InvalidNeedRM,        iemOp_vmaskmovpd_Mx_Hx_Vx,        iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,

    /* 0x30 */  iemOp_InvalidNeedRM,        iemOp_vpmovzxbw_Vx_UxMq,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x31 */  iemOp_InvalidNeedRM,        iemOp_vpmovzxbd_Vx_UxMd,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x32 */  iemOp_InvalidNeedRM,        iemOp_vpmovzxbq_Vx_UxMw,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x33 */  iemOp_InvalidNeedRM,        iemOp_vpmovzxwd_Vx_UxMq,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x34 */  iemOp_InvalidNeedRM,        iemOp_vpmovzxwq_Vx_UxMd,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x35 */  iemOp_InvalidNeedRM,        iemOp_vpmovzxdq_Vx_UxMq,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x36 */  iemOp_InvalidNeedRM,        iemOp_vpermd_Vqq_Hqq_Wqq,         iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x37 */  iemOp_InvalidNeedRM,        iemOp_vpcmpgtq_Vx_Hx_Wx,          iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x38 */  iemOp_InvalidNeedRM,        iemOp_vpminsb_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x39 */  iemOp_InvalidNeedRM,        iemOp_vpminsd_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x3a */  iemOp_InvalidNeedRM,        iemOp_vpminuw_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x3b */  iemOp_InvalidNeedRM,        iemOp_vpminud_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x3c */  iemOp_InvalidNeedRM,        iemOp_vpmaxsb_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x3d */  iemOp_InvalidNeedRM,        iemOp_vpmaxsd_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x3e */  iemOp_InvalidNeedRM,        iemOp_vpmaxuw_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x3f */  iemOp_InvalidNeedRM,        iemOp_vpmaxud_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,

    /* 0x40 */  iemOp_InvalidNeedRM,        iemOp_vpmulld_Vx_Hx_Wx,           iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x41 */  iemOp_InvalidNeedRM,        iemOp_vphminposuw_Vdq_Wdq,        iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x42 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */  iemOp_InvalidNeedRM,        iemOp_vpsrlvd_q_Vx_Hx_Wx,         iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x46 */  iemOp_InvalidNeedRM,        iemOp_vsravd_Vx_Hx_Wx,            iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x47 */  iemOp_InvalidNeedRM,        iemOp_vpsllvd_q_Vx_Hx_Wx,         iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x48 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x51 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x52 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x53 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x54 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x55 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x56 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x57 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x58 */  iemOp_InvalidNeedRM,        iemOp_vpbroadcastd_Vx_Wx,         iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x59 */  iemOp_InvalidNeedRM,        iemOp_vpbroadcastq_Vx_Wx,         iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x5a */  iemOp_InvalidNeedRM,        iemOp_vbroadcasti128_Vqq_Mdq,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x5b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x60 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x61 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x62 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x63 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x64 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x65 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x66 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x67 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x68 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x69 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x70 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x71 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x72 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x73 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x74 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x75 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x76 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x77 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x78 */  iemOp_InvalidNeedRM,        iemOp_vpbroadcastb_Vx_Wx,         iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x79 */  iemOp_InvalidNeedRM,        iemOp_vpbroadcastw_Vx_Wx,         iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x7a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x80 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */  iemOp_InvalidNeedRM,        iemOp_vpmaskmovd_q_Vx_Hx_Mx,      iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x8d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */  iemOp_InvalidNeedRM,        iemOp_vpmaskmovd_q_Mx_Vx_Hx,      iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x8f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */  iemOp_InvalidNeedRM,        iemOp_vgatherdd_q_Vx_Hx_Wx,       iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x91 */  iemOp_InvalidNeedRM,        iemOp_vgatherqd_q_Vx_Hx_Wx,       iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x92 */  iemOp_InvalidNeedRM,        iemOp_vgatherdps_d_Vx_Hx_Wx,      iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x93 */  iemOp_InvalidNeedRM,        iemOp_vgatherqps_d_Vx_Hx_Wx,      iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x94 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */  iemOp_InvalidNeedRM,        iemOp_vfmaddsub132ps_q_Vx_Hx_Wx,  iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x97 */  iemOp_InvalidNeedRM,        iemOp_vfmsubadd132ps_d_Vx_Hx_Wx,  iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x98 */  iemOp_InvalidNeedRM,        iemOp_vfmadd132ps_d_Vx_Hx_Wx,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x99 */  iemOp_InvalidNeedRM,        iemOp_vfmadd132ss_d_Vx_Hx_Wx,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x9a */  iemOp_InvalidNeedRM,        iemOp_vfmsub132ps_d_Vx_Hx_Wx,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x9b */  iemOp_InvalidNeedRM,        iemOp_vfmsub132ss_d_Vx_Hx_Wx,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x9c */  iemOp_InvalidNeedRM,        iemOp_vfnmadd132ps_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x9d */  iemOp_InvalidNeedRM,        iemOp_vfnmadd132ss_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x9e */  iemOp_InvalidNeedRM,        iemOp_vfnmsub132ps_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0x9f */  iemOp_InvalidNeedRM,        iemOp_vfnmsub132ss_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,

    /* 0xa0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */  iemOp_InvalidNeedRM,        iemOp_vfmaddsub213ps_d_Vx_Hx_Wx,  iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xa7 */  iemOp_InvalidNeedRM,        iemOp_vfmsubadd213ps_d_Vx_Hx_Wx,  iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xa8 */  iemOp_InvalidNeedRM,        iemOp_vfmadd213ps_d_Vx_Hx_Wx,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xa9 */  iemOp_InvalidNeedRM,        iemOp_vfmadd213ss_d_Vx_Hx_Wx,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xaa */  iemOp_InvalidNeedRM,        iemOp_vfmsub213ps_d_Vx_Hx_Wx,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xab */  iemOp_InvalidNeedRM,        iemOp_vfmsub213ss_d_Vx_Hx_Wx,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xac */  iemOp_InvalidNeedRM,        iemOp_vfnmadd213ps_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xad */  iemOp_InvalidNeedRM,        iemOp_vfnmadd213ss_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xae */  iemOp_InvalidNeedRM,        iemOp_vfnmsub213ps_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xaf */  iemOp_InvalidNeedRM,        iemOp_vfnmsub213ss_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,

    /* 0xb0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */  iemOp_InvalidNeedRM,        iemOp_vfmaddsub231ps_d_Vx_Hx_Wx,  iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xb7 */  iemOp_InvalidNeedRM,        iemOp_vfmsubadd231ps_d_Vx_Hx_Wx,  iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xb8 */  iemOp_InvalidNeedRM,        iemOp_vfmadd231ps_d_Vx_Hx_Wx,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xb9 */  iemOp_InvalidNeedRM,        iemOp_vfmadd231ss_d_Vx_Hx_Wx,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xba */  iemOp_InvalidNeedRM,        iemOp_vfmsub231ps_d_Vx_Hx_Wx,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xbb */  iemOp_InvalidNeedRM,        iemOp_vfmsub231ss_d_Vx_Hx_Wx,     iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xbc */  iemOp_InvalidNeedRM,        iemOp_vfnmadd231ps_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xbd */  iemOp_InvalidNeedRM,        iemOp_vfnmadd231ss_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xbe */  iemOp_InvalidNeedRM,        iemOp_vfnmsub231ps_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xbf */  iemOp_InvalidNeedRM,        iemOp_vfnmsub231ss_d_Vx_Hx_Wx,    iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,

    /* 0xc0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xda */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xdb */  iemOp_InvalidNeedRM,        iemOp_vaesimc_Vdq_Wdq,            iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xdc */  iemOp_InvalidNeedRM,        iemOp_vaesenc_Vdq_Wdq,            iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xdd */  iemOp_InvalidNeedRM,        iemOp_vaesenclast_Vdq_Wdq,        iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xde */  iemOp_InvalidNeedRM,        iemOp_vaesdec_Vdq_Wdq,            iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xdf */  iemOp_InvalidNeedRM,        iemOp_vaesdeclast_Vdq_Wdq,        iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,

    /* 0xe0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xea */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xeb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xec */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xed */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xee */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xef */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xf0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf2 */  iemOp_andn_Gy_By_Ey,        iemOp_InvalidNeedRM,              iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xf3 */  iemOp_VGrp17_f3,            iemOp_InvalidNeedRM,              iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
    /* 0xf4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf5 */  iemOp_bzhi_Gy_Ey_By,        iemOp_InvalidNeedRM,              iemOp_pext_Gy_By_Ey,  iemOp_pdep_Gy_By_Ey,
    /* 0xf6 */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,              iemOp_InvalidNeedRM,  iemOp_mulx_By_Gy_rDX_Ey,
    /* 0xf7 */  iemOp_bextr_Gy_Ey_By,       iemOp_shlx_Gy_Ey_By,              iemOp_sarx_Gy_Ey_By,  iemOp_shrx_Gy_Ey_By,
    /* 0xf8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfa */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfc */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfd */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfe */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xff */  IEMOP_X4(iemOp_InvalidNeedRM),
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap2) == 1024);

/** @} */