VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap3.cpp.h@103659

Last change on this file since 103659 was 103558, checked in by vboxsync, 10 months ago

VMM/IEM: Implement vpermilpd instruction emulations, bugref:9898

1/* $Id: IEMAllInstVexMap3.cpp.h 103558 2024-02-24 11:06:53Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation, 0x0f 0x3a map.
4 *
5 * @remarks IEMAllInstThree0f3a.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 3
33 * @{
34 */
35
36/**
37 * Common worker for AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128, imm8
39 * - vpxxx ymm0, ymm1, ymm2/mem256, imm8
40 *
41 * Takes function table for function w/o implicit state parameter.
42 *
43 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
44 */
45FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
46{
47 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
48 if (IEM_IS_MODRM_REG_MODE(bRm))
49 {
50 /*
51 * Register, register.
52 */
53 if (pVCpu->iem.s.uVexLength)
54 {
55 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
56 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
57 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
58 IEM_MC_LOCAL(RTUINT256U, uDst);
59 IEM_MC_LOCAL(RTUINT256U, uSrc1);
60 IEM_MC_LOCAL(RTUINT256U, uSrc2);
61 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
62 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
63 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
64 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
65 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
66 IEM_MC_PREPARE_AVX_USAGE();
67 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
68 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
69 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
70 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
71 IEM_MC_ADVANCE_RIP_AND_FINISH();
72 IEM_MC_END();
73 }
74 else
75 {
76 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
77 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
78 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
79 IEM_MC_ARG(PRTUINT128U, puDst, 0);
80 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
81 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
82 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
83 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
84 IEM_MC_PREPARE_AVX_USAGE();
85 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
86 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
87 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
88 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
89 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
90 IEM_MC_ADVANCE_RIP_AND_FINISH();
91 IEM_MC_END();
92 }
93 }
94 else
95 {
96 /*
97 * Register, memory.
98 */
99 if (pVCpu->iem.s.uVexLength)
100 {
101 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
102 IEM_MC_LOCAL(RTUINT256U, uDst);
103 IEM_MC_LOCAL(RTUINT256U, uSrc1);
104 IEM_MC_LOCAL(RTUINT256U, uSrc2);
105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
106 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
107 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
108 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
109
110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
111 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
112 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
113 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
114 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
115 IEM_MC_PREPARE_AVX_USAGE();
116
117 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
118 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
119 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
120 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
121
122 IEM_MC_ADVANCE_RIP_AND_FINISH();
123 IEM_MC_END();
124 }
125 else
126 {
127 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
128 IEM_MC_LOCAL(RTUINT128U, uSrc2);
129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
130 IEM_MC_ARG(PRTUINT128U, puDst, 0);
131 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
132 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
133
134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
135 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
136 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
137 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
138 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
139 IEM_MC_PREPARE_AVX_USAGE();
140
141 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
142 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
143 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
144 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
145 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
146
147 IEM_MC_ADVANCE_RIP_AND_FINISH();
148 IEM_MC_END();
149 }
150 }
151}
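
/*
 * For reference, a minimal standalone sketch of the word-granular blend that
 * VPBLENDW (one of the instructions routed through the worker above) performs
 * on a single 128-bit lane; the 256-bit form applies the same imm8 to both
 * lanes.  The helper name and the plain-array representation are illustrative
 * only and independent of the IEM state and function tables.
 */
static inline void vpblendwU128Sketch(uint16_t auDst[8], uint16_t const auSrc1[8],
                                      uint16_t const auSrc2[8], uint8_t bImm)
{
    for (unsigned i = 0; i < 8; i++)
        auDst[i] = (bImm & (1 << i)) ? auSrc2[i] : auSrc1[i];
}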
152
153
154/**
155 * Common worker for AVX instructions on the forms:
156 * - vpermilps/d xmm0, xmm1/mem128, imm8
157 * - vpermilps/d ymm0, ymm1/mem256, imm8
158 *
159 * Takes function table for function w/o implicit state parameter.
160 *
161 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
162 */
163FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF2IMM8, pImpl)
164{
165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
166 if (IEM_IS_MODRM_REG_MODE(bRm))
167 {
168 /*
169 * Register, register.
170 */
171 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
172 if (pVCpu->iem.s.uVexLength)
173 {
174 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
175 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
176 IEM_MC_LOCAL(RTUINT256U, uDst);
177 IEM_MC_LOCAL(RTUINT256U, uSrc);
178 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
179 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
180 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
181 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
182 IEM_MC_PREPARE_AVX_USAGE();
183 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
184 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
185 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
186 IEM_MC_ADVANCE_RIP_AND_FINISH();
187 IEM_MC_END();
188 }
189 else
190 {
191 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
192 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
193 IEM_MC_ARG(PRTUINT128U, puDst, 0);
194 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
195 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
196 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
197 IEM_MC_PREPARE_AVX_USAGE();
198 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
199 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
200 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
201 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
202 IEM_MC_ADVANCE_RIP_AND_FINISH();
203 IEM_MC_END();
204 }
205 }
206 else
207 {
208 /*
209 * Register, memory.
210 */
211 if (pVCpu->iem.s.uVexLength)
212 {
213 IEM_MC_BEGIN(3, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
214 IEM_MC_LOCAL(RTUINT256U, uDst);
215 IEM_MC_LOCAL(RTUINT256U, uSrc);
216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
217 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
218 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
219
220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
221 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
222 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
223 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
224 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
225 IEM_MC_PREPARE_AVX_USAGE();
226
227 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
228 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
229 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
230
231 IEM_MC_ADVANCE_RIP_AND_FINISH();
232 IEM_MC_END();
233 }
234 else
235 {
236 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
237 IEM_MC_LOCAL(RTUINT128U, uSrc);
238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
239 IEM_MC_ARG(PRTUINT128U, puDst, 0);
240 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
241
242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
243 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
244 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
245 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
246 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
247 IEM_MC_PREPARE_AVX_USAGE();
248
249 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
250 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
251 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
252 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
253
254 IEM_MC_ADVANCE_RIP_AND_FINISH();
255 IEM_MC_END();
256 }
257 }
258}
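
/*
 * For reference, a minimal standalone sketch of the in-lane qword selection
 * that VPERMILPD (served by the worker above) performs with an imm8 control:
 * bit 0 picks the low result qword, bit 1 the high one, each from either
 * qword of the same 128-bit lane; the 256-bit form uses bits 2 and 3 for the
 * upper lane.  Names and the plain-array representation are illustrative only.
 */
static inline void vpermilpdU128Sketch(uint64_t auDst[2], uint64_t const auSrc[2], uint8_t bImm)
{
    auDst[0] = auSrc[bImm & 1];
    auDst[1] = auSrc[(bImm >> 1) & 1];
}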
259
260
261/**
262 * Common worker for AVX instructions on the forms:
263 * - vblendps/d xmm0, xmm1, xmm2/mem128, imm8
264 * - vblendps/d ymm0, ymm1, ymm2/mem256, imm8
265 *
266 * Takes function table for function w/o implicit state parameter.
267 *
268 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
269 */
270FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
271{
272 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
273 if (IEM_IS_MODRM_REG_MODE(bRm))
274 {
275 /*
276 * Register, register.
277 */
278 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
279 if (pVCpu->iem.s.uVexLength)
280 {
281 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
282 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
283 IEM_MC_LOCAL(RTUINT256U, uDst);
284 IEM_MC_LOCAL(RTUINT256U, uSrc1);
285 IEM_MC_LOCAL(RTUINT256U, uSrc2);
286 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
287 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
288 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
289 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
290 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
291 IEM_MC_PREPARE_AVX_USAGE();
292 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
293 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
294 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
295 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
296 IEM_MC_ADVANCE_RIP_AND_FINISH();
297 IEM_MC_END();
298 }
299 else
300 {
301 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
302 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
303 IEM_MC_ARG(PRTUINT128U, puDst, 0);
304 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
305 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
306 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
307 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
308 IEM_MC_PREPARE_AVX_USAGE();
309 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
310 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
311 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
312 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
313 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
314 IEM_MC_ADVANCE_RIP_AND_FINISH();
315 IEM_MC_END();
316 }
317 }
318 else
319 {
320 /*
321 * Register, memory.
322 */
323 if (pVCpu->iem.s.uVexLength)
324 {
325 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
326 IEM_MC_LOCAL(RTUINT256U, uDst);
327 IEM_MC_LOCAL(RTUINT256U, uSrc1);
328 IEM_MC_LOCAL(RTUINT256U, uSrc2);
329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
330 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
331 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
332 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
333
334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
335 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
336 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
337 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
338 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
339 IEM_MC_PREPARE_AVX_USAGE();
340
341 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
342 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
343 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
344 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
345
346 IEM_MC_ADVANCE_RIP_AND_FINISH();
347 IEM_MC_END();
348 }
349 else
350 {
351 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
352 IEM_MC_LOCAL(RTUINT128U, uSrc2);
353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
354 IEM_MC_ARG(PRTUINT128U, puDst, 0);
355 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
356 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
357
358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
359 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
360 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
361 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
362 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
363 IEM_MC_PREPARE_AVX_USAGE();
364
365 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
366 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
367 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
368 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
369 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
370
371 IEM_MC_ADVANCE_RIP_AND_FINISH();
372 IEM_MC_END();
373 }
374 }
375}
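
/*
 * For reference, a minimal standalone sketch of the dword blend that VBLENDPS
 * (one of the instructions routed through the worker above) performs on a
 * 128-bit operand: a set imm8 bit i selects dword i from the second source,
 * a clear bit keeps the first source; the 256-bit form uses bits 0..7 for all
 * eight dwords.  Names and the plain-array representation are illustrative only.
 */
static inline void vblendpsU128Sketch(uint32_t auDst[4], uint32_t const auSrc1[4],
                                      uint32_t const auSrc2[4], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)
        auDst[i] = (bImm & (1 << i)) ? auSrc2[i] : auSrc1[i];
}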
376
377
378/** Opcode VEX.66.0F3A 0x00. */
379FNIEMOP_STUB(iemOp_vpermq_Vqq_Wqq_Ib);
380/** Opcode VEX.66.0F3A 0x01. */
381FNIEMOP_STUB(iemOp_vpermqd_Vqq_Wqq_Ib);
382/** Opcode VEX.66.0F3A 0x02. */
383FNIEMOP_STUB(iemOp_vpblendd_Vx_Wx_Ib);
384/* Opcode VEX.66.0F3A 0x03 - invalid */
385
386
387/** Opcode VEX.66.0F3A 0x04.
388 * AVX,AVX */
389FNIEMOP_DEF(iemOp_vpermilps_Vx_Wx_Ib)
390{
391 IEMOP_MNEMONIC3(VEX_RMI, VPERMILPS, vpermilps, Vx, Wx, Ib, DISOPTYPE_HARMLESS, 0); /** @todo */
392 IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilps);
393 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
394}
395
396
397/** Opcode VEX.66.0F3A 0x05.
398 * AVX,AVX */
399FNIEMOP_DEF(iemOp_vpermilpd_Vx_Wx_Ib)
400{
401 IEMOP_MNEMONIC3(VEX_RMI, VPERMILPD, vpermilpd, Vx, Wx, Ib, DISOPTYPE_HARMLESS, 0); /** @todo */
402 IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilpd);
403 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
404}
405
406
407/** Opcode VEX.66.0F3A 0x06 (vex only) */
408FNIEMOP_DEF(iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib)
409{
410 //IEMOP_MNEMONIC4(VEX_RVM, VPERM2F128, vperm2f128, Vqq, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0); /** @todo */
411
412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
413 if (IEM_IS_MODRM_REG_MODE(bRm))
414 {
415 /*
416 * Register, register.
417 */
418 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
419 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
420 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
421 IEM_MC_LOCAL(RTUINT256U, uDst);
422 IEM_MC_LOCAL(RTUINT256U, uSrc1);
423 IEM_MC_LOCAL(RTUINT256U, uSrc2);
424 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
425 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
426 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
427 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
428 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
429 IEM_MC_PREPARE_AVX_USAGE();
430 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
431 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
432 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
433 puDst, puSrc1, puSrc2, bImmArg);
434 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
435 IEM_MC_ADVANCE_RIP_AND_FINISH();
436 IEM_MC_END();
437 }
438 else
439 {
440 /*
441 * Register, memory.
442 */
443 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
444 IEM_MC_LOCAL(RTUINT256U, uDst);
445 IEM_MC_LOCAL(RTUINT256U, uSrc1);
446 IEM_MC_LOCAL(RTUINT256U, uSrc2);
447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
448 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
449 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
450 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
451
452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
453 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
454 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
455 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
456 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
457 IEM_MC_PREPARE_AVX_USAGE();
458
459 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
460 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
461 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
462 puDst, puSrc1, puSrc2, bImmArg);
463 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
464
465 IEM_MC_ADVANCE_RIP_AND_FINISH();
466 IEM_MC_END();
467 }
468}
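
/*
 * For reference, a minimal standalone sketch of the VPERM2F128 imm8 decoding:
 * bits [1:0] pick the 128-bit half written to the low half of the result
 * (0/1 = low/high half of the first source, 2/3 = low/high half of the
 * second), while bit 3 zeroes it instead; bits [5:4] and bit 7 do the same
 * for the high half.  The helper assumes the destination array does not alias
 * the sources; names and the qword-array representation are illustrative only.
 */
static inline void vperm2f128Sketch(uint64_t auDst[4], uint64_t const auSrc1[4],
                                    uint64_t const auSrc2[4], uint8_t bImm)
{
    uint64_t const *apHalves[4] = { &auSrc1[0], &auSrc1[2], &auSrc2[0], &auSrc2[2] };
    for (unsigned iHalf = 0; iHalf < 2; iHalf++)
    {
        uint8_t const bSel = (uint8_t)(bImm >> (iHalf * 4));
        if (bSel & 8)
            auDst[iHalf * 2] = auDst[iHalf * 2 + 1] = 0;
        else
        {
            auDst[iHalf * 2]     = apHalves[bSel & 3][0];
            auDst[iHalf * 2 + 1] = apHalves[bSel & 3][1];
        }
    }
}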
469
470
471/* Opcode VEX.66.0F3A 0x07 - invalid */
472/** Opcode VEX.66.0F3A 0x08. */
473FNIEMOP_STUB(iemOp_vroundps_Vx_Wx_Ib);
474/** Opcode VEX.66.0F3A 0x09. */
475FNIEMOP_STUB(iemOp_vroundpd_Vx_Wx_Ib);
476/** Opcode VEX.66.0F3A 0x0a. */
477FNIEMOP_STUB(iemOp_vroundss_Vss_Wss_Ib);
478/** Opcode VEX.66.0F3A 0x0b. */
479FNIEMOP_STUB(iemOp_vroundsd_Vsd_Wsd_Ib);
480
481
482/** Opcode VEX.66.0F3A 0x0c.
483 * AVX,AVX */
484FNIEMOP_DEF(iemOp_vblendps_Vx_Hx_Wx_Ib)
485{
486 IEMOP_MNEMONIC3(VEX_RVM, VBLENDPS, vblendps, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /** @todo */
487 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendps);
488 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
489}
490
491
492/** Opcode VEX.66.0F3A 0x0d.
493 * AVX,AVX */
494FNIEMOP_DEF(iemOp_vblendpd_Vx_Hx_Wx_Ib)
495{
496 IEMOP_MNEMONIC3(VEX_RVM, VBLENDPD, vblendpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /** @todo */
497 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendpd);
498 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
499}
500
501
502/** Opcode VEX.66.0F3A 0x0e.
503 * AVX,AVX2 */
504FNIEMOP_DEF(iemOp_vpblendw_Vx_Hx_Wx_Ib)
505{
506 IEMOP_MNEMONIC3(VEX_RVM, VPBLENDW, vpblendw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /** @todo */
507 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendw);
508 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
509}
510
511
512/** Opcode VEX.0F3A 0x0f - invalid. */
513
514
515/** Opcode VEX.66.0F3A 0x0f.
516 * AVX,AVX2 */
517FNIEMOP_DEF(iemOp_vpalignr_Vx_Hx_Wx_Ib)
518{
519 IEMOP_MNEMONIC3(VEX_RVM, VPALIGNR, vpalignr, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /** @todo */
520 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpalignr);
521 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
522}
523
524
525/* Opcode VEX.66.0F3A 0x10 - invalid */
526/* Opcode VEX.66.0F3A 0x11 - invalid */
527/* Opcode VEX.66.0F3A 0x12 - invalid */
528/* Opcode VEX.66.0F3A 0x13 - invalid */
529/** Opcode VEX.66.0F3A 0x14. */
530FNIEMOP_STUB(iemOp_vpextrb_RdMb_Vdq_Ib);
531/** Opcode VEX.66.0F3A 0x15. */
532FNIEMOP_STUB(iemOp_vpextrw_RdMw_Vdq_Ib);
533/** Opcode VEX.66.0F3A 0x16. */
534FNIEMOP_STUB(iemOp_vpextrd_q_RdMw_Vdq_Ib);
535/** Opcode VEX.66.0F3A 0x17. */
536FNIEMOP_STUB(iemOp_vextractps_Ed_Vdq_Ib);
537
538
539/** Opcode VEX.66.0F3A 0x18 (vex only). */
540FNIEMOP_DEF(iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib)
541{
542 //IEMOP_MNEMONIC4(VEX_RMI, VINSERTF128, vinsertf128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
543 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
544 if (IEM_IS_MODRM_REG_MODE(bRm))
545 {
546 /*
547 * Register, register.
548 */
549 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
550 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
551 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
552 IEM_MC_LOCAL(RTUINT128U, uSrc);
553
554 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
555 IEM_MC_PREPARE_AVX_USAGE();
556
557 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
558 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
559 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
560
561 IEM_MC_ADVANCE_RIP_AND_FINISH();
562 IEM_MC_END();
563 }
564 else
565 {
566 /*
567 * Register, memory.
568 */
569 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
570 IEM_MC_LOCAL(RTUINT128U, uSrc);
571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
572
573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
574 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
575 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
576 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
577 IEM_MC_PREPARE_AVX_USAGE();
578
579 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
580 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
581 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
582
583 IEM_MC_ADVANCE_RIP_AND_FINISH();
584 IEM_MC_END();
585 }
586}
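
/*
 * For reference, a minimal standalone sketch of what the VINSERTF128 body
 * above does with the immediate: the full 256-bit first source is copied to
 * the destination and the 128-bit second source then replaces the half
 * selected by imm8 bit 0.  The helper assumes no aliasing between the arrays;
 * names and the qword-array representation are illustrative only.
 */
static inline void vinsertf128Sketch(uint64_t auDst[4], uint64_t const auSrc1[4],
                                     uint64_t const auSrc2[2], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)
        auDst[i] = auSrc1[i];
    auDst[(bImm & 1) * 2]     = auSrc2[0];
    auDst[(bImm & 1) * 2 + 1] = auSrc2[1];
}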
587
588
589/** Opcode VEX.66.0F3A 0x19 (vex only). */
590FNIEMOP_STUB(iemOp_vextractf128_Wdq_Vqq_Ib);
591/* Opcode VEX.66.0F3A 0x1a - invalid */
592/* Opcode VEX.66.0F3A 0x1b - invalid */
593/* Opcode VEX.66.0F3A 0x1c - invalid */
594/** Opcode VEX.66.0F3A 0x1d (vex only). */
595FNIEMOP_STUB(iemOp_vcvtps2ph_Wx_Vx_Ib);
596/* Opcode VEX.66.0F3A 0x1e - invalid */
597/* Opcode VEX.66.0F3A 0x1f - invalid */
598
599
600/** Opcode VEX.66.0F3A 0x20. */
601FNIEMOP_STUB(iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib);
602/** Opcode VEX.66.0F3A 0x21. */
603FNIEMOP_STUB(iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib);
604/** Opcode VEX.66.0F3A 0x22. */
605FNIEMOP_STUB(iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib);
606/* Opcode VEX.66.0F3A 0x23 - invalid */
607/* Opcode VEX.66.0F3A 0x24 - invalid */
608/* Opcode VEX.66.0F3A 0x25 - invalid */
609/* Opcode VEX.66.0F3A 0x26 - invalid */
610/* Opcode VEX.66.0F3A 0x27 - invalid */
611/* Opcode VEX.66.0F3A 0x28 - invalid */
612/* Opcode VEX.66.0F3A 0x29 - invalid */
613/* Opcode VEX.66.0F3A 0x2a - invalid */
614/* Opcode VEX.66.0F3A 0x2b - invalid */
615/* Opcode VEX.66.0F3A 0x2c - invalid */
616/* Opcode VEX.66.0F3A 0x2d - invalid */
617/* Opcode VEX.66.0F3A 0x2e - invalid */
618/* Opcode VEX.66.0F3A 0x2f - invalid */
619
620
621/* Opcode VEX.66.0F3A 0x30 - invalid */
622/* Opcode VEX.66.0F3A 0x31 - invalid */
623/* Opcode VEX.66.0F3A 0x32 - invalid */
624/* Opcode VEX.66.0F3A 0x33 - invalid */
625/* Opcode VEX.66.0F3A 0x34 - invalid */
626/* Opcode VEX.66.0F3A 0x35 - invalid */
627/* Opcode VEX.66.0F3A 0x36 - invalid */
628/* Opcode VEX.66.0F3A 0x37 - invalid */
629
630
631/** Opcode VEX.66.0F3A 0x38 (vex only). */
632FNIEMOP_DEF(iemOp_vinserti128_Vqq_Hqq_Wqq_Ib)
633{
634 //IEMOP_MNEMONIC4(VEX_RMI, VINSERTI128, vinserti128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
635 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
636 if (IEM_IS_MODRM_REG_MODE(bRm))
637 {
638 /*
639 * Register, register.
640 */
641 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
642 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
643 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
644 IEM_MC_LOCAL(RTUINT128U, uSrc);
645
646 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
647 IEM_MC_PREPARE_AVX_USAGE();
648
649 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
650 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
651 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
652
653 IEM_MC_ADVANCE_RIP_AND_FINISH();
654 IEM_MC_END();
655 }
656 else
657 {
658 /*
659 * Register, memory.
660 */
661 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
662 IEM_MC_LOCAL(RTUINT128U, uSrc);
663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
664
665 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
666 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
667 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
668 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
669 IEM_MC_PREPARE_AVX_USAGE();
670
671 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
672 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
673 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
674
675 IEM_MC_ADVANCE_RIP_AND_FINISH();
676 IEM_MC_END();
677 }
678}
679
680
681/** Opcode VEX.66.0F3A 0x39 (vex only). */
682FNIEMOP_STUB(iemOp_vextracti128_Wdq_Vqq_Ib);
683/* Opcode VEX.66.0F3A 0x3a - invalid */
684/* Opcode VEX.66.0F3A 0x3b - invalid */
685/* Opcode VEX.66.0F3A 0x3c - invalid */
686/* Opcode VEX.66.0F3A 0x3d - invalid */
687/* Opcode VEX.66.0F3A 0x3e - invalid */
688/* Opcode VEX.66.0F3A 0x3f - invalid */
689
690
691/** Opcode VEX.66.0F3A 0x40. */
692FNIEMOP_STUB(iemOp_vdpps_Vx_Hx_Wx_Ib);
693/** Opcode VEX.66.0F3A 0x41. */
694FNIEMOP_STUB(iemOp_vdppd_Vdq_Hdq_Wdq_Ib);
695
696
697/** Opcode VEX.66.0F3A 0x42. */
698FNIEMOP_DEF(iemOp_vmpsadbw_Vx_Hx_Wx_Ib)
699{
700 IEMOP_MNEMONIC3(VEX_RVM, VMPSADBW, vmpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /** @todo */
701 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vmpsadbw);
702 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
703}
704
705
706/* Opcode VEX.66.0F3A 0x43 - invalid */
707
708
709/** Opcode VEX.66.0F3A 0x44. */
710FNIEMOP_DEF(iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib)
711{
712 //IEMOP_MNEMONIC3(VEX_RVM, VPCLMULQDQ, vpclmulqdq, Vdq, Hdq, Wdq, DISOPTYPE_HARMLESS, 0); /* @todo */
713
714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
715 if (IEM_IS_MODRM_REG_MODE(bRm))
716 {
717 /*
718 * Register, register.
719 */
720 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
721 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
722 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
723 IEM_MC_ARG(PRTUINT128U, puDst, 0);
724 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
725 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
726 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
727 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
728 IEM_MC_PREPARE_AVX_USAGE();
729 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
730 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
731 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
732 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
733 puDst, puSrc1, puSrc2, bImmArg);
734 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
735 IEM_MC_ADVANCE_RIP_AND_FINISH();
736 IEM_MC_END();
737 }
738 else
739 {
740 /*
741 * Register, memory.
742 */
743 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
744 IEM_MC_LOCAL(RTUINT128U, uSrc2);
745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
746 IEM_MC_ARG(PRTUINT128U, puDst, 0);
747 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
748 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
749
750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
751 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
752 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
753 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
754 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
755 IEM_MC_PREPARE_AVX_USAGE();
756
757 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
758 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
759 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
760 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
761 puDst, puSrc1, puSrc2, bImmArg);
762 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
763
764 IEM_MC_ADVANCE_RIP_AND_FINISH();
765 IEM_MC_END();
766 }
767}
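
/*
 * For reference, a minimal standalone sketch of the carry-less multiplication
 * behind VPCLMULQDQ: imm8 bit 0 selects which qword of the first source and
 * bit 4 which qword of the second source are multiplied, and the 128-bit
 * product is formed by XOR-accumulating shifted copies of one operand for
 * every set bit of the other.  Names are illustrative only.
 */
static inline void clmulU64Sketch(uint64_t *puResHi, uint64_t *puResLo, uint64_t uA, uint64_t uB)
{
    uint64_t uLo = 0;
    uint64_t uHi = 0;
    for (unsigned i = 0; i < 64; i++)
        if (uB & ((uint64_t)1 << i))
        {
            uLo ^= uA << i;
            uHi ^= i ? uA >> (64 - i) : 0;
        }
    *puResLo = uLo;
    *puResHi = uHi;
}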
768
769
770/* Opcode VEX.66.0F3A 0x45 - invalid */
771
772
773/** Opcode VEX.66.0F3A 0x46 (vex only) */
774FNIEMOP_DEF(iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib)
775{
776 //IEMOP_MNEMONIC4(VEX_RVM, VPERM2I128, vperm2i128, Vqq, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0); /** @todo */
777
778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
779 if (IEM_IS_MODRM_REG_MODE(bRm))
780 {
781 /*
782 * Register, register.
783 */
784 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
785 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
786 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
787 IEM_MC_LOCAL(RTUINT256U, uDst);
788 IEM_MC_LOCAL(RTUINT256U, uSrc1);
789 IEM_MC_LOCAL(RTUINT256U, uSrc2);
790 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
791 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
792 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
793 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
794 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
795 IEM_MC_PREPARE_AVX_USAGE();
796 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
797 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
798 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
799 puDst, puSrc1, puSrc2, bImmArg);
800 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
801 IEM_MC_ADVANCE_RIP_AND_FINISH();
802 IEM_MC_END();
803 }
804 else
805 {
806 /*
807 * Register, memory.
808 */
809 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
810 IEM_MC_LOCAL(RTUINT256U, uDst);
811 IEM_MC_LOCAL(RTUINT256U, uSrc1);
812 IEM_MC_LOCAL(RTUINT256U, uSrc2);
813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
814 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
815 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
816 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
817
818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
819 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
820 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
821 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
822 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
823 IEM_MC_PREPARE_AVX_USAGE();
824
825 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
826 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
827 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
828 puDst, puSrc1, puSrc2, bImmArg);
829 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
830
831 IEM_MC_ADVANCE_RIP_AND_FINISH();
832 IEM_MC_END();
833 }
834}
835
836
837/* Opcode VEX.66.0F3A 0x47 - invalid */
838/** Opcode VEX.66.0F3A 0x48 (AMD tables only). */
839FNIEMOP_STUB(iemOp_vperlmilzz2ps_Vx_Hx_Wp_Lx);
840/** Opcode VEX.66.0F3A 0x49 (AMD tables only). */
841FNIEMOP_STUB(iemOp_vperlmilzz2pd_Vx_Hx_Wp_Lx);
842
843
844/**
845 * Common worker for AVX instructions on the forms:
846 * - vblendvps/d xmm0, xmm1, xmm2/mem128, xmm4
847 * - vblendvps/d ymm0, ymm1, ymm2/mem256, ymm4
848 *
849 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operations.
850 */
851FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
852{
853 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
854 if (IEM_IS_MODRM_REG_MODE(bRm))
855 {
856 /*
857 * Register, register.
858 */
859 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
860 if (pVCpu->iem.s.uVexLength)
861 {
862 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
863 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
864 IEM_MC_LOCAL(RTUINT256U, uDst);
865 IEM_MC_LOCAL(RTUINT256U, uSrc1);
866 IEM_MC_LOCAL(RTUINT256U, uSrc2);
867 IEM_MC_LOCAL(RTUINT256U, uSrc3);
868 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
869 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
870 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
871 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
872 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
873 IEM_MC_PREPARE_AVX_USAGE();
874 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
875 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
876 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
877 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
878 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
879 IEM_MC_ADVANCE_RIP_AND_FINISH();
880 IEM_MC_END();
881 }
882 else
883 {
884 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
885 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
886 IEM_MC_ARG(PRTUINT128U, puDst, 0);
887 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
888 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
889 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
890 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
891 IEM_MC_PREPARE_AVX_USAGE();
892 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
893 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
894 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
895 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
896 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
897 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
898 IEM_MC_ADVANCE_RIP_AND_FINISH();
899 IEM_MC_END();
900 }
901 }
902 else
903 {
904 /*
905 * Register, memory.
906 */
907 if (pVCpu->iem.s.uVexLength)
908 {
909 IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER, 0);
910 IEM_MC_LOCAL(RTUINT256U, uDst);
911 IEM_MC_LOCAL(RTUINT256U, uSrc1);
912 IEM_MC_LOCAL(RTUINT256U, uSrc2);
913 IEM_MC_LOCAL(RTUINT256U, uSrc3);
914 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
915 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
916 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
917 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
918 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
919
920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
921 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
922
923 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
924 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
925 IEM_MC_PREPARE_AVX_USAGE();
926
927 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
928 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
929 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_EFFECTIVE_VVVV(pVCpu));
930 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
931 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
932 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
933
934 IEM_MC_ADVANCE_RIP_AND_FINISH();
935 IEM_MC_END();
936 }
937 else
938 {
939 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
940 IEM_MC_LOCAL(RTUINT128U, uSrc2);
941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
942 IEM_MC_ARG(PRTUINT128U, puDst, 0);
943 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
944 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
945 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
946
947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
948 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
949
950 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
951 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
952 IEM_MC_PREPARE_AVX_USAGE();
953
954 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
955 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
956 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
957 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
958 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
959 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
960
961 IEM_MC_ADVANCE_RIP_AND_FINISH();
962 IEM_MC_END();
963 }
964 }
965}
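
/*
 * For reference, a minimal standalone sketch of the variable blend performed
 * by VBLENDVPS, which the worker above implements: the fourth register (its
 * index taken from bits [7:4] of the trailing byte, bOp4 above) supplies a
 * per-dword mask, and the sign bit of each mask dword selects between the two
 * sources.  Names and the plain-array representation are illustrative only.
 */
static inline void vblendvpsU128Sketch(uint32_t auDst[4], uint32_t const auSrc1[4],
                                       uint32_t const auSrc2[4], uint32_t const auMask[4])
{
    for (unsigned i = 0; i < 4; i++)
        auDst[i] = (auMask[i] & UINT32_C(0x80000000)) ? auSrc2[i] : auSrc1[i];
}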
966
967
968/** Opcode VEX.66.0F3A 0x4a (vex only).
969 * AVX, AVX */
970FNIEMOP_DEF(iemOp_vblendvps_Vx_Hx_Wx_Lx)
971{
972 //IEMOP_MNEMONIC4(VEX_RVM, VBLENDVPS, vblendvps, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
973 IEMOPBLENDOP_INIT_VARS(vblendvps);
974 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
975}
976
977
978/** Opcode VEX.66.0F3A 0x4b (vex only).
979 * AVX, AVX */
980FNIEMOP_DEF(iemOp_vblendvpd_Vx_Hx_Wx_Lx)
981{
982 //IEMOP_MNEMONIC4(VEX_RVM, VBLENDVPD, vblendvpd, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
983 IEMOPBLENDOP_INIT_VARS(vblendvpd);
984 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
985}
986
987
988/**
989 * Common worker for AVX2 instructions on the forms:
990 * - vpxxx xmm0, xmm1, xmm2/mem128, xmm4
991 * - vpxxx ymm0, ymm1, ymm2/mem256, ymm4
992 *
993 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
994 */
995FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
996{
997 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
998 if (IEM_IS_MODRM_REG_MODE(bRm))
999 {
1000 /*
1001 * Register, register.
1002 */
1003 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1004 if (pVCpu->iem.s.uVexLength)
1005 {
1006 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
1007 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
1008 IEM_MC_LOCAL(RTUINT256U, uDst);
1009 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1010 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1011 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1012 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1013 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1014 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1015 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1016 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1017 IEM_MC_PREPARE_AVX_USAGE();
1018 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1019 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1020 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
1021 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1022 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1023 IEM_MC_ADVANCE_RIP_AND_FINISH();
1024 IEM_MC_END();
1025 }
1026 else
1027 {
1028 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
1029 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1030 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1031 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1032 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
1033 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1034 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1035 IEM_MC_PREPARE_AVX_USAGE();
1036 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1037 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1038 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1039 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
1040 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1041 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1042 IEM_MC_ADVANCE_RIP_AND_FINISH();
1043 IEM_MC_END();
1044 }
1045 }
1046 else
1047 {
1048 /*
1049 * Register, memory.
1050 */
1051 if (pVCpu->iem.s.uVexLength)
1052 {
1053 IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER, 0);
1054 IEM_MC_LOCAL(RTUINT256U, uDst);
1055 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1056 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1057 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1059 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1060 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1061 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1062 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1063
1064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1065 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1066
1067 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
1068 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1069 IEM_MC_PREPARE_AVX_USAGE();
1070
1071 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1072 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1073 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1074 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
1075 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1076 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1077
1078 IEM_MC_ADVANCE_RIP_AND_FINISH();
1079 IEM_MC_END();
1080 }
1081 else
1082 {
1083 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1084 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1086 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1087 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1088 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1089 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1090
1091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1092 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1093
1094 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1095 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1096 IEM_MC_PREPARE_AVX_USAGE();
1097
1098 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1099 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1100 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1101 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
1102 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1103 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1104
1105 IEM_MC_ADVANCE_RIP_AND_FINISH();
1106 IEM_MC_END();
1107 }
1108 }
1109}
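
/*
 * For reference, a minimal standalone sketch of the byte-granular variable
 * blend performed by VPBLENDVB, which the worker above implements: the most
 * significant bit of each byte of the mask register (selected by bOp4 >> 4)
 * picks the corresponding byte from the second or the first source.  Names
 * and the plain-array representation are illustrative only.
 */
static inline void vpblendvbU128Sketch(uint8_t abDst[16], uint8_t const abSrc1[16],
                                       uint8_t const abSrc2[16], uint8_t const abMask[16])
{
    for (unsigned i = 0; i < 16; i++)
        abDst[i] = (abMask[i] & 0x80) ? abSrc2[i] : abSrc1[i];
}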
1110
1111
1112/** Opcode VEX.66.0F3A 0x4c (vex only).
1113 * AVX, AVX2 */
1114FNIEMOP_DEF(iemOp_vpblendvb_Vx_Hx_Wx_Lx)
1115{
1116 //IEMOP_MNEMONIC4(VEX_RVM, VPBLENDVB, vpblendvb, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
1117 IEMOPBLENDOP_INIT_VARS(vpblendvb);
1118 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1119}
1120
1121
1122/* Opcode VEX.66.0F3A 0x4d - invalid */
1123/* Opcode VEX.66.0F3A 0x4e - invalid */
1124/* Opcode VEX.66.0F3A 0x4f - invalid */
1125
1126
1127/* Opcode VEX.66.0F3A 0x50 - invalid */
1128/* Opcode VEX.66.0F3A 0x51 - invalid */
1129/* Opcode VEX.66.0F3A 0x52 - invalid */
1130/* Opcode VEX.66.0F3A 0x53 - invalid */
1131/* Opcode VEX.66.0F3A 0x54 - invalid */
1132/* Opcode VEX.66.0F3A 0x55 - invalid */
1133/* Opcode VEX.66.0F3A 0x56 - invalid */
1134/* Opcode VEX.66.0F3A 0x57 - invalid */
1135/* Opcode VEX.66.0F3A 0x58 - invalid */
1136/* Opcode VEX.66.0F3A 0x59 - invalid */
1137/* Opcode VEX.66.0F3A 0x5a - invalid */
1138/* Opcode VEX.66.0F3A 0x5b - invalid */
1139/** Opcode VEX.66.0F3A 0x5c (AMD tables only). */
1140FNIEMOP_STUB(iemOp_vfmaddsubps_Vx_Lx_Wx_Hx);
1141/** Opcode VEX.66.0F3A 0x5d (AMD tables only). */
1142FNIEMOP_STUB(iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx);
1143/** Opcode VEX.66.0F3A 0x5e (AMD tables only). */
1144FNIEMOP_STUB(iemOp_vfmsubaddps_Vx_Lx_Wx_Hx);
1145/** Opcode VEX.66.0F3A 0x5f (AMD tables only). */
1146FNIEMOP_STUB(iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx);
1147
1148
1149/** Opcode VEX.66.0F3A 0x60. */
1150FNIEMOP_STUB(iemOp_vpcmpestrm_Vdq_Wdq_Ib);
1151/** Opcode VEX.66.0F3A 0x61. */
1152FNIEMOP_STUB(iemOp_vpcmpestri_Vdq_Wdq_Ib);
1153/** Opcode VEX.66.0F3A 0x62. */
1154FNIEMOP_STUB(iemOp_vpcmpistrm_Vdq_Wdq_Ib);
1155/** Opcode VEX.66.0F3A 0x63. */
1156FNIEMOP_STUB(iemOp_vpcmpistri_Vdq_Wdq_Ib);
1157/* Opcode VEX.66.0F3A 0x64 - invalid */
1158/* Opcode VEX.66.0F3A 0x65 - invalid */
1159/* Opcode VEX.66.0F3A 0x66 - invalid */
1160/* Opcode VEX.66.0F3A 0x67 - invalid */
1161/** Opcode VEX.66.0F3A 0x68 (AMD tables only). */
1162FNIEMOP_STUB(iemOp_vfmaddps_Vx_Lx_Wx_Hx);
1163/** Opcode VEX.66.0F3A 0x69 (AMD tables only). */
1164FNIEMOP_STUB(iemOp_vfmaddpd_Vx_Lx_Wx_Hx);
1165/** Opcode VEX.66.0F3A 0x6a (AMD tables only). */
1166FNIEMOP_STUB(iemOp_vfmaddss_Vx_Lx_Wx_Hx);
1167/** Opcode VEX.66.0F3A 0x6b (AMD tables only). */
1168FNIEMOP_STUB(iemOp_vfmaddsd_Vx_Lx_Wx_Hx);
1169/** Opcode VEX.66.0F3A 0x6c (AMD tables only). */
1170FNIEMOP_STUB(iemOp_vfmsubps_Vx_Lx_Wx_Hx);
1171/** Opcode VEX.66.0F3A 0x6d (AMD tables only). */
1172FNIEMOP_STUB(iemOp_vfmsubpd_Vx_Lx_Wx_Hx);
1173/** Opcode VEX.66.0F3A 0x6e (AMD tables only). */
1174FNIEMOP_STUB(iemOp_vfmsubss_Vx_Lx_Wx_Hx);
1175/** Opcode VEX.66.0F3A 0x6f (AMD tables only). */
1176FNIEMOP_STUB(iemOp_vfmsubsd_Vx_Lx_Wx_Hx);
1177
1178/* Opcode VEX.66.0F3A 0x70 - invalid */
1179/* Opcode VEX.66.0F3A 0x71 - invalid */
1180/* Opcode VEX.66.0F3A 0x72 - invalid */
1181/* Opcode VEX.66.0F3A 0x73 - invalid */
1182/* Opcode VEX.66.0F3A 0x74 - invalid */
1183/* Opcode VEX.66.0F3A 0x75 - invalid */
1184/* Opcode VEX.66.0F3A 0x76 - invalid */
1185/* Opcode VEX.66.0F3A 0x77 - invalid */
1186/** Opcode VEX.66.0F3A 0x78 (AMD tables only). */
1187FNIEMOP_STUB(iemOp_vfnmaddps_Vx_Lx_Wx_Hx);
1188/** Opcode VEX.66.0F3A 0x79 (AMD tables only). */
1189FNIEMOP_STUB(iemOp_vfnmaddpd_Vx_Lx_Wx_Hx);
1190/** Opcode VEX.66.0F3A 0x7a (AMD tables only). */
1191FNIEMOP_STUB(iemOp_vfnmaddss_Vx_Lx_Wx_Hx);
1192/** Opcode VEX.66.0F3A 0x7b (AMD tables only). */
1193FNIEMOP_STUB(iemOp_vfnmaddsd_Vx_Lx_Wx_Hx);
1194/** Opcode VEX.66.0F3A 0x7c (AMD tables only). */
1195FNIEMOP_STUB(iemOp_vfnmsubps_Vx_Lx_Wx_Hx);
1196/** Opcode VEX.66.0F3A 0x7d (AMD tables only). */
1197FNIEMOP_STUB(iemOp_vfnmsubpd_Vx_Lx_Wx_Hx);
1198/** Opcode VEX.66.0F3A 0x7e (AMD tables only). */
1199FNIEMOP_STUB(iemOp_vfnmsubss_Vx_Lx_Wx_Hx);
1200/** Opcode VEX.66.0F3A 0x7f (AMD tables only). */
1201FNIEMOP_STUB(iemOp_vfnmsubsd_Vx_Lx_Wx_Hx);
1202
1203/* Opcodes VEX.66.0F3A 0x80 thru 0xbf are unused. */
1204
1205
1206/* Opcode VEX.66.0F3A 0xc0 - invalid */
1207/* Opcode VEX.66.0F3A 0xc1 - invalid */
1208/* Opcode VEX.66.0F3A 0xc2 - invalid */
1209/* Opcode VEX.66.0F3A 0xc3 - invalid */
1210/* Opcode VEX.66.0F3A 0xc4 - invalid */
1211/* Opcode VEX.66.0F3A 0xc5 - invalid */
1212/* Opcode VEX.66.0F3A 0xc6 - invalid */
1213/* Opcode VEX.66.0F3A 0xc7 - invalid */
1214/* Opcode VEX.66.0F3A 0xc8 - invalid */
1215/* Opcode VEX.66.0F3A 0xc9 - invalid */
1216/* Opcode VEX.66.0F3A 0xca - invalid */
1217/* Opcode VEX.66.0F3A 0xcb - invalid */
1218/* Opcode VEX.66.0F3A 0xcc - invalid */
1219/* Opcode VEX.66.0F3A 0xcd - invalid */
1220/* Opcode VEX.66.0F3A 0xce - invalid */
1221/* Opcode VEX.66.0F3A 0xcf - invalid */
1222
1223
1224/* Opcode VEX.66.0F3A 0xd0 - invalid */
1225/* Opcode VEX.66.0F3A 0xd1 - invalid */
1226/* Opcode VEX.66.0F3A 0xd2 - invalid */
1227/* Opcode VEX.66.0F3A 0xd3 - invalid */
1228/* Opcode VEX.66.0F3A 0xd4 - invalid */
1229/* Opcode VEX.66.0F3A 0xd5 - invalid */
1230/* Opcode VEX.66.0F3A 0xd6 - invalid */
1231/* Opcode VEX.66.0F3A 0xd7 - invalid */
1232/* Opcode VEX.66.0F3A 0xd8 - invalid */
1233/* Opcode VEX.66.0F3A 0xd9 - invalid */
1234/* Opcode VEX.66.0F3A 0xda - invalid */
1235/* Opcode VEX.66.0F3A 0xdb - invalid */
1236/* Opcode VEX.66.0F3A 0xdc - invalid */
1237/* Opcode VEX.66.0F3A 0xdd - invalid */
1238/* Opcode VEX.66.0F3A 0xde - invalid */
1239/* Opcode VEX.66.0F3A 0xdf - (aeskeygenassist). */
1240FNIEMOP_STUB(iemOp_vaeskeygen_Vdq_Wdq_Ib);
1241
1242
1243/**
1244 * @opcode 0xf0
1245 * @oppfx 0xf2
1246 * @opflclass unchanged
1247 */
1248FNIEMOP_DEF(iemOp_rorx_Gy_Ey_Ib)
1249{
1250 IEMOP_MNEMONIC3(VEX_RMI, RORX, rorx, Gy, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO);
1251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1252 if (IEM_IS_MODRM_REG_MODE(bRm))
1253 {
1254 /*
1255 * Register, register.
1256 */
1257 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1258 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1259 {
1260 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
1261 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1262 IEM_MC_ARG(uint64_t *, pDst, 0);
1263 IEM_MC_ARG(uint64_t, uSrc1, 1);
1264 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
1265 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
1266 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1267 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
1268 IEM_MC_ADVANCE_RIP_AND_FINISH();
1269 IEM_MC_END();
1270 }
1271 else
1272 {
1273 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
1274 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1275 IEM_MC_ARG(uint32_t *, pDst, 0);
1276 IEM_MC_ARG(uint32_t, uSrc1, 1);
1277 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
1278 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
1279 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1280 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
1281 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
1282 IEM_MC_ADVANCE_RIP_AND_FINISH();
1283 IEM_MC_END();
1284 }
1285 }
1286 else
1287 {
1288 /*
1289 * Register, memory.
1290 */
1291 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1292 {
1293 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
1294 IEM_MC_ARG(uint64_t *, pDst, 0);
1295 IEM_MC_ARG(uint64_t, uSrc1, 1);
1296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1297 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1298 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1299 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
1300 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1301 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1302 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1303 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
1304 IEM_MC_ADVANCE_RIP_AND_FINISH();
1305 IEM_MC_END();
1306 }
1307 else
1308 {
1309 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
1310 IEM_MC_ARG(uint32_t *, pDst, 0);
1311 IEM_MC_ARG(uint32_t, uSrc1, 1);
1312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1313 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1314 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1315 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
1316 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1317 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1318 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1319 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
1320 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
1321 IEM_MC_ADVANCE_RIP_AND_FINISH();
1322 IEM_MC_END();
1323 }
1324 }
1325}
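
/*
 * For reference, a minimal standalone sketch of the 64-bit RORX operation the
 * body above emulates: the source is rotated right by the immediate (masked
 * to the operand width) and, unlike ROR, no flags are touched.  The helper
 * name is illustrative only.
 */
static inline uint64_t rorxU64Sketch(uint64_t uSrc, uint8_t cShift)
{
    cShift &= 63;
    return cShift ? (uSrc >> cShift) | (uSrc << (64 - cShift)) : uSrc;
}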
1326
1327
1328/**
1329 * VEX opcode map \#3.
1330 *
1331 * @sa g_apfnThreeByte0f3a
1332 */
1333const PFNIEMOP g_apfnVexMap3[] =
1334{
1335 /* no prefix, 066h prefix f3h prefix, f2h prefix */
1336 /* 0x00 */ iemOp_InvalidNeedRMImm8, iemOp_vpermq_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1337 /* 0x01 */ iemOp_InvalidNeedRMImm8, iemOp_vpermqd_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1338 /* 0x02 */ iemOp_InvalidNeedRMImm8, iemOp_vpblendd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1339 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1340 /* 0x04 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1341 /* 0x05 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1342 /* 0x06 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1343 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1344 /* 0x08 */ iemOp_InvalidNeedRMImm8, iemOp_vroundps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1345 /* 0x09 */ iemOp_InvalidNeedRMImm8, iemOp_vroundpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1346 /* 0x0a */ iemOp_InvalidNeedRMImm8, iemOp_vroundss_Vss_Wss_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1347 /* 0x0b */ iemOp_InvalidNeedRMImm8, iemOp_vroundsd_Vsd_Wsd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1348 /* 0x0c */ iemOp_InvalidNeedRMImm8, iemOp_vblendps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1349 /* 0x0d */ iemOp_InvalidNeedRMImm8, iemOp_vblendpd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1350 /* 0x0e */ iemOp_InvalidNeedRMImm8, iemOp_vpblendw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1351 /* 0x0f */ iemOp_InvalidNeedRMImm8, iemOp_vpalignr_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1352
1353 /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1354 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1355 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1356 /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1357 /* 0x14 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrb_RdMb_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1358 /* 0x15 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrw_RdMw_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1359 /* 0x16 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrd_q_RdMw_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1360 /* 0x17 */ iemOp_InvalidNeedRMImm8, iemOp_vextractps_Ed_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1361 /* 0x18 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1362 /* 0x19 */ iemOp_InvalidNeedRMImm8, iemOp_vextractf128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1363 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1364 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1365 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1366 /* 0x1d */ iemOp_InvalidNeedRMImm8, iemOp_vcvtps2ph_Wx_Vx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1367 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1368 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1369
1370 /* 0x20 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1371 /* 0x21 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1372 /* 0x22 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x28 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x29 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x2a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x2b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x38 */ iemOp_InvalidNeedRMImm8, iemOp_vinserti128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x39 */ iemOp_InvalidNeedRMImm8, iemOp_vextracti128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0x40 */ iemOp_InvalidNeedRMImm8, iemOp_vdpps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x41 */ iemOp_InvalidNeedRMImm8, iemOp_vdppd_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x42 */ iemOp_InvalidNeedRMImm8, iemOp_vmpsadbw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x44 */ iemOp_InvalidNeedRMImm8, iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x46 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x48 */ iemOp_InvalidNeedRMImm8, iemOp_vperlmilzz2ps_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x49 */ iemOp_InvalidNeedRMImm8, iemOp_vperlmilzz2pd_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x4a */ iemOp_InvalidNeedRMImm8, iemOp_vblendvps_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x4b */ iemOp_InvalidNeedRMImm8, iemOp_vblendvpd_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x4c */ iemOp_InvalidNeedRMImm8, iemOp_vpblendvb_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
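/* Note: the *_Vx_Lx_Wx_Hx entries at 0x5c..0x5f, 0x68..0x6f and 0x78..0x7f below appear to be the AMD FMA4 forms, which take a fourth register operand encoded in imm8[7:4]. */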
/* 0x5c */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x5d */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x5e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x5f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

/* 0x60 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x61 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x62 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x63 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x68 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x69 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x6a */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x6b */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x6c */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x6d */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x6e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x6f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

/* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x78 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x79 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x7a */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x7b */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x7c */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x7d */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x7e */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x7f */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

/* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xab */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xac */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xad */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xae */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xba */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xca */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xce */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xda */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xdb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xdc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xdd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xde */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xdf */ iemOp_InvalidNeedRMImm8, iemOp_vaeskeygen_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

/* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xea */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xec */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xed */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xee */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xef */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0xf0 */ iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_rorx_Gy_Ey_Ib,
/* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xff */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
};
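/* 256 opcodes x 4 prefix columns (none, 066h, 0F3h, 0F2h) = 1024 table entries. */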
AssertCompile(RT_ELEMENTS(g_apfnVexMap3) == 1024);

/** @} */
