VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap3.cpp.h@ 103899

Last change on this file was in revision 103899, checked in by vboxsync, 13 months ago

VMM/IEM: More mnemonic @todos. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 73.8 KB
1/* $Id: IEMAllInstVexMap3.cpp.h 103899 2024-03-18 15:57:16Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation, 0x0f 0x3a map.
4 *
5 * @remarks This file is the VEX mirror of IEMAllInstThree0f3a.cpp.h.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 3
33 * @{
34 */
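/* Note: every defined entry in this map takes a ModR/M byte followed by a trailing
   immediate byte, which is why the undefined slots in g_apfnVexMap3 at the bottom of
   this file are routed to iemOp_InvalidNeedRMImm8 rather than a plain invalid handler. */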
35
36/**
37 * Common worker for AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128, imm8
39 * - vpxxx ymm0, ymm1, ymm2/mem256, imm8
40 *
41 * Takes function table for function w/o implicit state parameter.
42 *
43 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
44 */
45FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
46{
47 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
48 if (IEM_IS_MODRM_REG_MODE(bRm))
49 {
50 /*
51 * Register, register.
52 */
53 if (pVCpu->iem.s.uVexLength)
54 {
55 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
56 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
57 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
58 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
59 IEM_MC_PREPARE_AVX_USAGE();
60
61 IEM_MC_LOCAL(RTUINT256U, uDst);
62 IEM_MC_LOCAL(RTUINT256U, uSrc1);
63 IEM_MC_LOCAL(RTUINT256U, uSrc2);
64 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
65 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
66 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
67 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
68 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
69 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
70 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
71 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
72 IEM_MC_ADVANCE_RIP_AND_FINISH();
73 IEM_MC_END();
74 }
75 else
76 {
77 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
78 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
79 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
80 IEM_MC_ARG(PRTUINT128U, puDst, 0);
81 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
82 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
83 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
84 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
85 IEM_MC_PREPARE_AVX_USAGE();
86 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
87 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
88 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
89 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
90 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
91 IEM_MC_ADVANCE_RIP_AND_FINISH();
92 IEM_MC_END();
93 }
94 }
95 else
96 {
97 /*
98 * Register, memory.
99 */
100 if (pVCpu->iem.s.uVexLength)
101 {
102 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
103 IEM_MC_LOCAL(RTUINT256U, uDst);
104 IEM_MC_LOCAL(RTUINT256U, uSrc1);
105 IEM_MC_LOCAL(RTUINT256U, uSrc2);
106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
107 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
108 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
109 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
110
111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
112 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
113 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
114 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
115 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
116 IEM_MC_PREPARE_AVX_USAGE();
117
118 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
119 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
120 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
121 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
122
123 IEM_MC_ADVANCE_RIP_AND_FINISH();
124 IEM_MC_END();
125 }
126 else
127 {
128 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
129 IEM_MC_LOCAL(RTUINT128U, uSrc2);
130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
131 IEM_MC_ARG(PRTUINT128U, puDst, 0);
132 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
133 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
134
135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
136 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
137 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
138 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
139 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
140 IEM_MC_PREPARE_AVX_USAGE();
141
142 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
143 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
144 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
145 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
146 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
147
148 IEM_MC_ADVANCE_RIP_AND_FINISH();
149 IEM_MC_END();
150 }
151 }
152}
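/* Typical use of this worker (sketch, mirroring the vpblendd decoder further down):
   initialise the per-instruction implementation table, then dispatch with either the
   host-assembly or the C fallback variant depending on host CPU features:

       IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendd);
       return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt,
                             IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 */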
153
154
155/**
156 * Common worker for AVX instructions on the forms:
157 * - vpermilps/d xmm0, xmm1/mem128, imm8
158 * - vpermilps/d ymm0, ymm1/mem256, imm8
159 *
160 * Takes function table for function w/o implicit state parameter.
161 *
162 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
163 */
164FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF2IMM8, pImpl)
165{
166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
167 if (IEM_IS_MODRM_REG_MODE(bRm))
168 {
169 /*
170 * Register, register.
171 */
172 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
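        /* Note: both length variants below use the NO_VVVV decoding helper - the imm8
           forms of vpermilps/vpermilpd are two-operand, so VEX.vvvv must be all ones
           (see the IEMOPHINT_VEX_V_ZERO hints at the call sites). */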
173 if (pVCpu->iem.s.uVexLength)
174 {
175 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
176 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
177 IEM_MC_LOCAL(RTUINT256U, uDst);
178 IEM_MC_LOCAL(RTUINT256U, uSrc);
179 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
180 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
181 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
182 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
183 IEM_MC_PREPARE_AVX_USAGE();
184 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
186 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
187 IEM_MC_ADVANCE_RIP_AND_FINISH();
188 IEM_MC_END();
189 }
190 else
191 {
192 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
193 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
194 IEM_MC_ARG(PRTUINT128U, puDst, 0);
195 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
196 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
197 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
198 IEM_MC_PREPARE_AVX_USAGE();
199 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
200 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
201 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
202 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
203 IEM_MC_ADVANCE_RIP_AND_FINISH();
204 IEM_MC_END();
205 }
206 }
207 else
208 {
209 /*
210 * Register, memory.
211 */
212 if (pVCpu->iem.s.uVexLength)
213 {
214 IEM_MC_BEGIN(3, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
215 IEM_MC_LOCAL(RTUINT256U, uDst);
216 IEM_MC_LOCAL(RTUINT256U, uSrc);
217 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
218 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
219 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
220
221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
222 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
223 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
224 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
225 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
226 IEM_MC_PREPARE_AVX_USAGE();
227
228 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
229 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
230 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
231
232 IEM_MC_ADVANCE_RIP_AND_FINISH();
233 IEM_MC_END();
234 }
235 else
236 {
237 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
238 IEM_MC_LOCAL(RTUINT128U, uSrc);
239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
240 IEM_MC_ARG(PRTUINT128U, puDst, 0);
241 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
242
243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
244 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
245 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
246 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
247 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
248 IEM_MC_PREPARE_AVX_USAGE();
249
250 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
251 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
252 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
253 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
254
255 IEM_MC_ADVANCE_RIP_AND_FINISH();
256 IEM_MC_END();
257 }
258 }
259}
260
261
262/**
263 * Common worker for AVX instructions on the forms:
264 * - vblendps/d xmm0, xmm1, xmm2/mem128, imm8
265 * - vblendps/d ymm0, ymm1, ymm2/mem256, imm8
266 *
267 * Takes function table for function w/o implicit state parameter.
268 *
269 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
270 */
271FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
272{
273 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
274 if (IEM_IS_MODRM_REG_MODE(bRm))
275 {
276 /*
277 * Register, register.
278 */
279 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
280 if (pVCpu->iem.s.uVexLength)
281 {
282 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
283 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
284 IEM_MC_LOCAL(RTUINT256U, uDst);
285 IEM_MC_LOCAL(RTUINT256U, uSrc1);
286 IEM_MC_LOCAL(RTUINT256U, uSrc2);
287 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
288 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
289 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
290 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
291 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
292 IEM_MC_PREPARE_AVX_USAGE();
293 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
294 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
295 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
296 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
297 IEM_MC_ADVANCE_RIP_AND_FINISH();
298 IEM_MC_END();
299 }
300 else
301 {
302 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
303 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
304 IEM_MC_ARG(PRTUINT128U, puDst, 0);
305 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
306 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
307 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
308 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
309 IEM_MC_PREPARE_AVX_USAGE();
310 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
311 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
312 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
313 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
314 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
315 IEM_MC_ADVANCE_RIP_AND_FINISH();
316 IEM_MC_END();
317 }
318 }
319 else
320 {
321 /*
322 * Register, memory.
323 */
324 if (pVCpu->iem.s.uVexLength)
325 {
326 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
327 IEM_MC_LOCAL(RTUINT256U, uDst);
328 IEM_MC_LOCAL(RTUINT256U, uSrc1);
329 IEM_MC_LOCAL(RTUINT256U, uSrc2);
330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
331 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
332 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
333 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
334
335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
336 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
337 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
338 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
339 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
340 IEM_MC_PREPARE_AVX_USAGE();
341
342 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
343 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
344 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
345 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
346
347 IEM_MC_ADVANCE_RIP_AND_FINISH();
348 IEM_MC_END();
349 }
350 else
351 {
352 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
353 IEM_MC_LOCAL(RTUINT128U, uSrc2);
354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
355 IEM_MC_ARG(PRTUINT128U, puDst, 0);
356 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
357 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
358
359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
360 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
361 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
362 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
363 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
364 IEM_MC_PREPARE_AVX_USAGE();
365
366 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
367 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
368 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
369 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
370 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
371
372 IEM_MC_ADVANCE_RIP_AND_FINISH();
373 IEM_MC_END();
374 }
375 }
376}
377
378
379/** Opcode VEX.66.0F3A 0x00. */
380FNIEMOP_STUB(iemOp_vpermq_Vqq_Wqq_Ib);
381/** Opcode VEX.66.0F3A 0x01. */
382FNIEMOP_STUB(iemOp_vpermqd_Vqq_Wqq_Ib);
383
384
385/** Opcode VEX.66.0F3A 0x02.
386 * AVX2,AVX2 */
387FNIEMOP_DEF(iemOp_vpblendd_Vx_Hx_Wx_Ib)
388{
389 IEMOP_MNEMONIC4(VEX_RVMI, VPBLENDD, vpblendd, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
390 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendd);
391 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
392}
393
394
395/* Opcode VEX.66.0F3A 0x03 - invalid */
396
397
398/** Opcode VEX.66.0F3A 0x04.
399 * AVX,AVX */
400FNIEMOP_DEF(iemOp_vpermilps_Vx_Wx_Ib)
401{
402 IEMOP_MNEMONIC3(VEX_RMI, VPERMILPS, vpermilps, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
403 IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilps);
404 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
405}
406
407
408/** Opcode VEX.66.0F3A 0x05.
409 * AVX,AVX */
410FNIEMOP_DEF(iemOp_vpermilpd_Vx_Wx_Ib)
411{
412 IEMOP_MNEMONIC3(VEX_RMI, VPERMILPD, vpermilpd, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
413 IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilpd);
414 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
415}
416
417
418/** Opcode VEX.66.0F3A 0x06 (vex only) */
419FNIEMOP_DEF(iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib)
420{
421 IEMOP_MNEMONIC4(VEX_RVMI, VPERM2F128, vperm2f128, Vqq_WO, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0);
422 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
423 if (IEM_IS_MODRM_REG_MODE(bRm))
424 {
425 /*
426 * Register, register.
427 */
428 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
429 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
430 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
431 IEM_MC_LOCAL(RTUINT256U, uDst);
432 IEM_MC_LOCAL(RTUINT256U, uSrc1);
433 IEM_MC_LOCAL(RTUINT256U, uSrc2);
434 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
435 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
436 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
437 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
438 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
439 IEM_MC_PREPARE_AVX_USAGE();
440 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
441 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
442 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
443 puDst, puSrc1, puSrc2, bImmArg);
444 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
445 IEM_MC_ADVANCE_RIP_AND_FINISH();
446 IEM_MC_END();
447 }
448 else
449 {
450 /*
451 * Register, memory.
452 */
453 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
454 IEM_MC_LOCAL(RTUINT256U, uDst);
455 IEM_MC_LOCAL(RTUINT256U, uSrc1);
456 IEM_MC_LOCAL(RTUINT256U, uSrc2);
457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
458 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
459 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
460 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
461
462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
463 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
464 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
465 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
466 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
467 IEM_MC_PREPARE_AVX_USAGE();
468
469 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
470 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
471 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
472 puDst, puSrc1, puSrc2, bImmArg);
473 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
474
475 IEM_MC_ADVANCE_RIP_AND_FINISH();
476 IEM_MC_END();
477 }
478}
479
480
481/* Opcode VEX.66.0F3A 0x07 - invalid */
482/** Opcode VEX.66.0F3A 0x08. */
483FNIEMOP_STUB(iemOp_vroundps_Vx_Wx_Ib);
484/** Opcode VEX.66.0F3A 0x09. */
485FNIEMOP_STUB(iemOp_vroundpd_Vx_Wx_Ib);
486/** Opcode VEX.66.0F3A 0x0a. */
487FNIEMOP_STUB(iemOp_vroundss_Vss_Wss_Ib);
488/** Opcode VEX.66.0F3A 0x0b. */
489FNIEMOP_STUB(iemOp_vroundsd_Vsd_Wsd_Ib);
490
491
492/** Opcode VEX.66.0F3A 0x0c.
493 * AVX,AVX */
494FNIEMOP_DEF(iemOp_vblendps_Vx_Hx_Wx_Ib)
495{
496 IEMOP_MNEMONIC4(VEX_RVMI, VBLENDPS, vblendps, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
497 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendps);
498 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
499}
500
501
502/** Opcode VEX.66.0F3A 0x0d.
503 * AVX,AVX */
504FNIEMOP_DEF(iemOp_vblendpd_Vx_Hx_Wx_Ib)
505{
506 IEMOP_MNEMONIC4(VEX_RVMI, VBLENDPD, vblendpd, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
507 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendpd);
508 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
509}
510
511
512/** Opcode VEX.66.0F3A 0x0e.
513 * AVX,AVX2 */
514FNIEMOP_DEF(iemOp_vpblendw_Vx_Hx_Wx_Ib)
515{
516 IEMOP_MNEMONIC4(VEX_RVMI, VPBLENDW, vpblendw, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
517 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendw);
518 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
519}
520
521
522/** Opcode VEX.0F3A 0x0f - invalid. */
523
524
525/** Opcode VEX.66.0F3A 0x0f.
526 * AVX,AVX2 */
527FNIEMOP_DEF(iemOp_vpalignr_Vx_Hx_Wx_Ib)
528{
529 IEMOP_MNEMONIC4(VEX_RVMI, VPALIGNR, vpalignr, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
530 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpalignr);
531 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
532}
533
534
535/* Opcode VEX.66.0F3A 0x10 - invalid */
536/* Opcode VEX.66.0F3A 0x11 - invalid */
537/* Opcode VEX.66.0F3A 0x12 - invalid */
538/* Opcode VEX.66.0F3A 0x13 - invalid */
539/** Opcode VEX.66.0F3A 0x14. */
540FNIEMOP_STUB(iemOp_vpextrb_RdMb_Vdq_Ib);
541
542
543/** Opcode VEX.66.0F3A 0x15 - vpextrw Ew, Vdq, Ib */
544FNIEMOP_DEF(iemOp_vpextrw_Ew_Vdq_Ib)
545{
546 IEMOP_MNEMONIC3(VEX_MRI, VPEXTRW, vpextrw, Ew_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO);
547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
548 if (IEM_IS_MODRM_REG_MODE(bRm))
549 {
550 /*
551 * Register, register.
552 */
553 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
554 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
555 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
556 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
557 IEM_MC_PREPARE_AVX_USAGE();
558 IEM_MC_LOCAL(uint16_t, u16Dst);
559 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
560 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
561 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
562 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
563 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpextrw_u128, iemAImpl_vpextrw_u128_fallback),
564 pu16Dst, puSrc, bImmArg);
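        /* The 16-bit result is written with a 32-bit GPR store: vpextrw to a register
           zero-extends the extracted word into the full destination register. */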
565 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u16Dst);
566 IEM_MC_ADVANCE_RIP_AND_FINISH();
567 IEM_MC_END();
568 }
569 else
570 {
571 /*
572 * Memory, register.
573 */
574 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
575 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
578
579 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
580 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
581 IEM_MC_PREPARE_AVX_USAGE();
582
583 IEM_MC_LOCAL(uint16_t, u16Dst);
584 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
585 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
586 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
587 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
588 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpextrw_u128, iemAImpl_vpextrw_u128_fallback),
589 pu16Dst, puSrc, bImmArg);
590 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u16Dst);
591 IEM_MC_ADVANCE_RIP_AND_FINISH();
592 IEM_MC_END();
593 }
594}
595
596
597/** Opcode VEX.66.0F3A 0x16. */
598FNIEMOP_STUB(iemOp_vpextrd_q_RdMw_Vdq_Ib);
599/** Opcode VEX.66.0F3A 0x17. */
600FNIEMOP_STUB(iemOp_vextractps_Ed_Vdq_Ib);
601
602
603/** Opcode VEX.66.0F3A 0x18 (vex only). */
604FNIEMOP_DEF(iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib)
605{
606 //IEMOP_MNEMONIC4(VEX_RMI, VINSERTF128, vinsertf128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
607 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
608 if (IEM_IS_MODRM_REG_MODE(bRm))
609 {
610 /*
611 * Register, register.
612 */
613 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
614 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
615 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
616 IEM_MC_LOCAL(RTUINT128U, uSrc);
617
618 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
619 IEM_MC_PREPARE_AVX_USAGE();
620
621 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
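        /* Copy the full 256-bit VVVV source to the destination first, then overwrite
           the 128-bit lane selected by imm8 bit 0 with the fetched xmm value. */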
622 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
623 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
624
625 IEM_MC_ADVANCE_RIP_AND_FINISH();
626 IEM_MC_END();
627 }
628 else
629 {
630 /*
631 * Register, memory.
632 */
633 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
634 IEM_MC_LOCAL(RTUINT128U, uSrc);
635 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
636
637 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
638 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
639 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
640 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
641 IEM_MC_PREPARE_AVX_USAGE();
642
643 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
644 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
645 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
646
647 IEM_MC_ADVANCE_RIP_AND_FINISH();
648 IEM_MC_END();
649 }
650}
651
652
653/** Opcode VEX.66.0F3A 0x19 (vex only). */
654FNIEMOP_STUB(iemOp_vextractf128_Wdq_Vqq_Ib);
655/* Opcode VEX.66.0F3A 0x1a - invalid */
656/* Opcode VEX.66.0F3A 0x1b - invalid */
657/* Opcode VEX.66.0F3A 0x1c - invalid */
658/** Opcode VEX.66.0F3A 0x1d (vex only). */
659FNIEMOP_STUB(iemOp_vcvtps2ph_Wx_Vx_Ib);
660/* Opcode VEX.66.0F3A 0x1e - invalid */
661/* Opcode VEX.66.0F3A 0x1f - invalid */
662
663
664/** Opcode VEX.66.0F3A 0x20. */
665FNIEMOP_STUB(iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib);
666/** Opcode VEX.66.0F3A 0x21. */
667FNIEMOP_STUB(iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib);
668/** Opcode VEX.66.0F3A 0x22. */
669FNIEMOP_STUB(iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib);
670/* Opcode VEX.66.0F3A 0x23 - invalid */
671/* Opcode VEX.66.0F3A 0x24 - invalid */
672/* Opcode VEX.66.0F3A 0x25 - invalid */
673/* Opcode VEX.66.0F3A 0x26 - invalid */
674/* Opcode VEX.66.0F3A 0x27 - invalid */
675/* Opcode VEX.66.0F3A 0x28 - invalid */
676/* Opcode VEX.66.0F3A 0x29 - invalid */
677/* Opcode VEX.66.0F3A 0x2a - invalid */
678/* Opcode VEX.66.0F3A 0x2b - invalid */
679/* Opcode VEX.66.0F3A 0x2c - invalid */
680/* Opcode VEX.66.0F3A 0x2d - invalid */
681/* Opcode VEX.66.0F3A 0x2e - invalid */
682/* Opcode VEX.66.0F3A 0x2f - invalid */
683
684
685/* Opcode VEX.66.0F3A 0x30 - invalid */
686/* Opcode VEX.66.0F3A 0x31 - invalid */
687/* Opcode VEX.66.0F3A 0x32 - invalid */
688/* Opcode VEX.66.0F3A 0x33 - invalid */
689/* Opcode VEX.66.0F3A 0x34 - invalid */
690/* Opcode VEX.66.0F3A 0x35 - invalid */
691/* Opcode VEX.66.0F3A 0x36 - invalid */
692/* Opcode VEX.66.0F3A 0x37 - invalid */
693
694
695/** Opcode VEX.66.0F3A 0x38 (vex only). */
696FNIEMOP_DEF(iemOp_vinserti128_Vqq_Hqq_Wqq_Ib)
697{
698 //IEMOP_MNEMONIC4(VEX_RMI, VINSERTI128, vinserti128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
699 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
700 if (IEM_IS_MODRM_REG_MODE(bRm))
701 {
702 /*
703 * Register, register.
704 */
705 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
706 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
707 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
708 IEM_MC_LOCAL(RTUINT128U, uSrc);
709
710 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
711 IEM_MC_PREPARE_AVX_USAGE();
712
713 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
714 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
715 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
716
717 IEM_MC_ADVANCE_RIP_AND_FINISH();
718 IEM_MC_END();
719 }
720 else
721 {
722 /*
723 * Register, memory.
724 */
725 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
726 IEM_MC_LOCAL(RTUINT128U, uSrc);
727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
728
729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
730 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
731 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
732 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
733 IEM_MC_PREPARE_AVX_USAGE();
734
735 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
736 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
737 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
738
739 IEM_MC_ADVANCE_RIP_AND_FINISH();
740 IEM_MC_END();
741 }
742}
743
744
745/** Opcode VEX.66.0F3A 0x39 (vex only). */
746FNIEMOP_STUB(iemOp_vextracti128_Wdq_Vqq_Ib);
747/* Opcode VEX.66.0F3A 0x3a - invalid */
748/* Opcode VEX.66.0F3A 0x3b - invalid */
749/* Opcode VEX.66.0F3A 0x3c - invalid */
750/* Opcode VEX.66.0F3A 0x3d - invalid */
751/* Opcode VEX.66.0F3A 0x3e - invalid */
752/* Opcode VEX.66.0F3A 0x3f - invalid */
753
754
755/** Opcode VEX.66.0F3A 0x40. */
756FNIEMOP_STUB(iemOp_vdpps_Vx_Hx_Wx_Ib);
757/** Opcode VEX.66.0F3A 0x41. */
758FNIEMOP_STUB(iemOp_vdppd_Vdq_Hdq_Wdq_Ib);
759
760
761/** Opcode VEX.66.0F3A 0x42. */
762FNIEMOP_DEF(iemOp_vmpsadbw_Vx_Hx_Wx_Ib)
763{
764 IEMOP_MNEMONIC4(VEX_RVMI, VMPSADBW, vmpsadbw, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
765 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vmpsadbw);
766 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
767}
768
769
770/* Opcode VEX.66.0F3A 0x43 - invalid */
771
772
773/** Opcode VEX.66.0F3A 0x44. */
774FNIEMOP_DEF(iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib)
775{
776 //IEMOP_MNEMONIC3(VEX_RVM, VPCLMULQDQ, vpclmulqdq, Vdq, Hdq, Wdq, DISOPTYPE_HARMLESS, 0); /* @todo */
777
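    /* The trailing imm8 selects which 64-bit half of each source feeds the carry-less
       multiply (bit 0 for the first source, bit 4 for the second); that selection is
       left to the iemAImpl_vpclmulqdq_u128 worker invoked below. */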
778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
779 if (IEM_IS_MODRM_REG_MODE(bRm))
780 {
781 /*
782 * Register, register.
783 */
784 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
785 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
786 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
787 IEM_MC_ARG(PRTUINT128U, puDst, 0);
788 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
789 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
790 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
791 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
792 IEM_MC_PREPARE_AVX_USAGE();
793 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
794 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
795 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
796 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
797 puDst, puSrc1, puSrc2, bImmArg);
798 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
799 IEM_MC_ADVANCE_RIP_AND_FINISH();
800 IEM_MC_END();
801 }
802 else
803 {
804 /*
805 * Register, memory.
806 */
807 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
808 IEM_MC_LOCAL(RTUINT128U, uSrc2);
809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
810 IEM_MC_ARG(PRTUINT128U, puDst, 0);
811 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
812 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
813
814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
815 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
816 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
817 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
818 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
819 IEM_MC_PREPARE_AVX_USAGE();
820
821 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
822 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
823 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
824 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
825 puDst, puSrc1, puSrc2, bImmArg);
826 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
827
828 IEM_MC_ADVANCE_RIP_AND_FINISH();
829 IEM_MC_END();
830 }
831}
832
833
834/* Opcode VEX.66.0F3A 0x45 - invalid */
835
836
837/** Opcode VEX.66.0F3A 0x46 (vex only) */
838FNIEMOP_DEF(iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib)
839{
840 //IEMOP_MNEMONIC4(VEX_RVM, VPERM2I128, vperm2i128, Vqq, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0); /** @todo */
841
842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
843 if (IEM_IS_MODRM_REG_MODE(bRm))
844 {
845 /*
846 * Register, register.
847 */
848 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
849 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
850 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
851 IEM_MC_LOCAL(RTUINT256U, uDst);
852 IEM_MC_LOCAL(RTUINT256U, uSrc1);
853 IEM_MC_LOCAL(RTUINT256U, uSrc2);
854 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
855 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
856 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
857 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
858 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
859 IEM_MC_PREPARE_AVX_USAGE();
860 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
861 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
862 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
863 puDst, puSrc1, puSrc2, bImmArg);
864 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
865 IEM_MC_ADVANCE_RIP_AND_FINISH();
866 IEM_MC_END();
867 }
868 else
869 {
870 /*
871 * Register, memory.
872 */
873 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
874 IEM_MC_LOCAL(RTUINT256U, uDst);
875 IEM_MC_LOCAL(RTUINT256U, uSrc1);
876 IEM_MC_LOCAL(RTUINT256U, uSrc2);
877 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
878 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
879 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
880 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
881
882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
883 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
884 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
885 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
886 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
887 IEM_MC_PREPARE_AVX_USAGE();
888
889 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
890 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
891 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
892 puDst, puSrc1, puSrc2, bImmArg);
893 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
894
895 IEM_MC_ADVANCE_RIP_AND_FINISH();
896 IEM_MC_END();
897 }
898}
899
900
901/* Opcode VEX.66.0F3A 0x47 - invalid */
902/** Opcode VEX.66.0F3A 0x48 (AMD tables only). */
903FNIEMOP_STUB(iemOp_vperlmilzz2ps_Vx_Hx_Wp_Lx);
904/** Opcode VEX.66.0F3A 0x49 (AMD tables only). */
905FNIEMOP_STUB(iemOp_vperlmilzz2pd_Vx_Hx_Wp_Lx);
906
907
908/**
909 * Common worker for AVX instructions on the forms:
910 * - vblendvps/d xmm0, xmm1, xmm2/mem128, xmm4
911 * - vblendvps/d ymm0, ymm1, ymm2/mem256, ymm4
912 *
913 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operations.
914 */
915FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
916{
917 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
918 if (IEM_IS_MODRM_REG_MODE(bRm))
919 {
920 /*
921 * Register, register.
922 */
923 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
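        /* For the blendv forms the trailing byte is not an immediate: its high nibble
           (bOp4 >> 4) names the XMM/YMM register supplying the blend mask, which is
           why it is used as a register index below. */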
924 if (pVCpu->iem.s.uVexLength)
925 {
926 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
927 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
928 IEM_MC_LOCAL(RTUINT256U, uDst);
929 IEM_MC_LOCAL(RTUINT256U, uSrc1);
930 IEM_MC_LOCAL(RTUINT256U, uSrc2);
931 IEM_MC_LOCAL(RTUINT256U, uSrc3);
932 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
933 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
934 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
935 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
936 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
937 IEM_MC_PREPARE_AVX_USAGE();
938 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
939 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
940 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
941 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
942 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
943 IEM_MC_ADVANCE_RIP_AND_FINISH();
944 IEM_MC_END();
945 }
946 else
947 {
948 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
949 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
950 IEM_MC_ARG(PRTUINT128U, puDst, 0);
951 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
952 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
953 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
954 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
955 IEM_MC_PREPARE_AVX_USAGE();
956 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
957 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
958 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
959 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
960 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
961 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
962 IEM_MC_ADVANCE_RIP_AND_FINISH();
963 IEM_MC_END();
964 }
965 }
966 else
967 {
968 /*
969 * Register, memory.
970 */
971 if (pVCpu->iem.s.uVexLength)
972 {
973 IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER, 0);
974 IEM_MC_LOCAL(RTUINT256U, uDst);
975 IEM_MC_LOCAL(RTUINT256U, uSrc1);
976 IEM_MC_LOCAL(RTUINT256U, uSrc2);
977 IEM_MC_LOCAL(RTUINT256U, uSrc3);
978 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
979 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
980 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
981 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
982 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
983
984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
985 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
986
987 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
988 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
989 IEM_MC_PREPARE_AVX_USAGE();
990
991 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
992 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
994 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
995 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
996 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
997
998 IEM_MC_ADVANCE_RIP_AND_FINISH();
999 IEM_MC_END();
1000 }
1001 else
1002 {
1003 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1004 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1006 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1007 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1008 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1009 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1010
1011 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1012 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1013
1014 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1015 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1016 IEM_MC_PREPARE_AVX_USAGE();
1017
1018 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1019 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1020 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1021 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
1022 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1023 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1024
1025 IEM_MC_ADVANCE_RIP_AND_FINISH();
1026 IEM_MC_END();
1027 }
1028 }
1029}
1030
1031
1032/** Opcode VEX.66.0F3A 0x4a (vex only).
1033 * AVX, AVX */
1034FNIEMOP_DEF(iemOp_vblendvps_Vx_Hx_Wx_Lx)
1035{
1036 //IEMOP_MNEMONIC4(VEX_RVM, VBLENDVPS, vpblendvps, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
1037 IEMOPBLENDOP_INIT_VARS(vblendvps);
1038 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1039}
1040
1041
1042/** Opcode VEX.66.0F3A 0x4b (vex only).
1043 * AVX, AVX */
1044FNIEMOP_DEF(iemOp_vblendvpd_Vx_Hx_Wx_Lx)
1045{
1046 //IEMOP_MNEMONIC4(VEX_RVM, VPBLENDVPD, blendvpd, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
1047 IEMOPBLENDOP_INIT_VARS(vblendvpd);
1048 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1049}
1050
1051
1052/**
1053 * Common worker for AVX2 instructions on the forms:
1054 * - vpxxx xmm0, xmm1, xmm2/mem128, xmm4
1055 * - vpxxx ymm0, ymm1, ymm2/mem256, ymm4
1056 *
1057 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
1058 */
1059FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
1060{
1061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1062 if (IEM_IS_MODRM_REG_MODE(bRm))
1063 {
1064 /*
1065 * Register, register.
1066 */
1067 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1068 if (pVCpu->iem.s.uVexLength)
1069 {
1070 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
1071 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
1072 IEM_MC_LOCAL(RTUINT256U, uDst);
1073 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1074 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1075 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1076 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1077 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1078 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1079 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1080 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1081 IEM_MC_PREPARE_AVX_USAGE();
1082 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1083 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1084 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
1085 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1086 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1087 IEM_MC_ADVANCE_RIP_AND_FINISH();
1088 IEM_MC_END();
1089 }
1090 else
1091 {
1092 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
1093 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1094 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1095 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1096 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
1097 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1099 IEM_MC_PREPARE_AVX_USAGE();
1100 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1101 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1102 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1103 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
1104 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1105 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1106 IEM_MC_ADVANCE_RIP_AND_FINISH();
1107 IEM_MC_END();
1108 }
1109 }
1110 else
1111 {
1112 /*
1113 * Register, memory.
1114 */
1115 if (pVCpu->iem.s.uVexLength)
1116 {
1117 IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER, 0);
1118 IEM_MC_LOCAL(RTUINT256U, uDst);
1119 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1120 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1121 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1123 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1124 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1125 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1126 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1127
1128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1129 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1130
1131 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
1132 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1133 IEM_MC_PREPARE_AVX_USAGE();
1134
1135 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1136 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1138 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
1139 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1140 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1141
1142 IEM_MC_ADVANCE_RIP_AND_FINISH();
1143 IEM_MC_END();
1144 }
1145 else
1146 {
1147 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1148 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1150 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1151 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1152 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1153 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1154
1155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1156 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1157
1158 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1159 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1160 IEM_MC_PREPARE_AVX_USAGE();
1161
1162 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1163 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1164 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1165 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
1166 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1167 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1168
1169 IEM_MC_ADVANCE_RIP_AND_FINISH();
1170 IEM_MC_END();
1171 }
1172 }
1173}
1174
1175
1176/** Opcode VEX.66.0F3A 0x4c (vex only).
1177 * AVX, AVX2 */
1178FNIEMOP_DEF(iemOp_vpblendvb_Vx_Hx_Wx_Lx)
1179{
1180 //IEMOP_MNEMONIC4(VEX_RVM, VPBLENDVB, vpblendvb, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
1181 IEMOPBLENDOP_INIT_VARS(vpblendvb);
1182 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1183}
1184
1185
1186/* Opcode VEX.66.0F3A 0x4d - invalid */
1187/* Opcode VEX.66.0F3A 0x4e - invalid */
1188/* Opcode VEX.66.0F3A 0x4f - invalid */
1189
1190
1191/* Opcode VEX.66.0F3A 0x50 - invalid */
1192/* Opcode VEX.66.0F3A 0x51 - invalid */
1193/* Opcode VEX.66.0F3A 0x52 - invalid */
1194/* Opcode VEX.66.0F3A 0x53 - invalid */
1195/* Opcode VEX.66.0F3A 0x54 - invalid */
1196/* Opcode VEX.66.0F3A 0x55 - invalid */
1197/* Opcode VEX.66.0F3A 0x56 - invalid */
1198/* Opcode VEX.66.0F3A 0x57 - invalid */
1199/* Opcode VEX.66.0F3A 0x58 - invalid */
1200/* Opcode VEX.66.0F3A 0x59 - invalid */
1201/* Opcode VEX.66.0F3A 0x5a - invalid */
1202/* Opcode VEX.66.0F3A 0x5b - invalid */
1203/** Opcode VEX.66.0F3A 0x5c (AMD tables only). */
1204FNIEMOP_STUB(iemOp_vfmaddsubps_Vx_Lx_Wx_Hx);
1205/** Opcode VEX.66.0F3A 0x5d (AMD tables only). */
1206FNIEMOP_STUB(iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx);
1207/** Opcode VEX.66.0F3A 0x5e (AMD tables only). */
1208FNIEMOP_STUB(iemOp_vfmsubaddps_Vx_Lx_Wx_Hx);
1209/** Opcode VEX.66.0F3A 0x5f (AMD tables only). */
1210FNIEMOP_STUB(iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx);
1211
1212
1213/** Opcode VEX.66.0F3A 0x60. */
1214FNIEMOP_STUB(iemOp_vpcmpestrm_Vdq_Wdq_Ib);
1215/** Opcode VEX.66.0F3A 0x61. */
1216FNIEMOP_STUB(iemOp_vpcmpestri_Vdq_Wdq_Ib);
1217/** Opcode VEX.66.0F3A 0x62. */
1218FNIEMOP_STUB(iemOp_vpcmpistrm_Vdq_Wdq_Ib);
1219/** Opcode VEX.66.0F3A 0x63. */
1220FNIEMOP_STUB(iemOp_vpcmpistri_Vdq_Wdq_Ib);
1221/* Opcode VEX.66.0F3A 0x64 - invalid */
1222/* Opcode VEX.66.0F3A 0x65 - invalid */
1223/* Opcode VEX.66.0F3A 0x66 - invalid */
1224/* Opcode VEX.66.0F3A 0x67 - invalid */
1225/** Opcode VEX.66.0F3A 0x68 (AMD tables only). */
1226FNIEMOP_STUB(iemOp_vfmaddps_Vx_Lx_Wx_Hx);
1227/** Opcode VEX.66.0F3A 0x69 (AMD tables only). */
1228FNIEMOP_STUB(iemOp_vfmaddpd_Vx_Lx_Wx_Hx);
1229/** Opcode VEX.66.0F3A 0x6a (AMD tables only). */
1230FNIEMOP_STUB(iemOp_vfmaddss_Vx_Lx_Wx_Hx);
1231/** Opcode VEX.66.0F3A 0x6b (AMD tables only). */
1232FNIEMOP_STUB(iemOp_vfmaddsd_Vx_Lx_Wx_Hx);
1233/** Opcode VEX.66.0F3A 0x6c (AMD tables only). */
1234FNIEMOP_STUB(iemOp_vfmsubps_Vx_Lx_Wx_Hx);
1235/** Opcode VEX.66.0F3A 0x6d (AMD tables only). */
1236FNIEMOP_STUB(iemOp_vfmsubpd_Vx_Lx_Wx_Hx);
1237/** Opcode VEX.66.0F3A 0x6e (AMD tables only). */
1238FNIEMOP_STUB(iemOp_vfmsubss_Vx_Lx_Wx_Hx);
1239/** Opcode VEX.66.0F3A 0x6f (AMD tables only). */
1240FNIEMOP_STUB(iemOp_vfmsubsd_Vx_Lx_Wx_Hx);
1241
1242/* Opcode VEX.66.0F3A 0x70 - invalid */
1243/* Opcode VEX.66.0F3A 0x71 - invalid */
1244/* Opcode VEX.66.0F3A 0x72 - invalid */
1245/* Opcode VEX.66.0F3A 0x73 - invalid */
1246/* Opcode VEX.66.0F3A 0x74 - invalid */
1247/* Opcode VEX.66.0F3A 0x75 - invalid */
1248/* Opcode VEX.66.0F3A 0x76 - invalid */
1249/* Opcode VEX.66.0F3A 0x77 - invalid */
1250/** Opcode VEX.66.0F3A 0x78 (AMD tables only). */
1251FNIEMOP_STUB(iemOp_vfnmaddps_Vx_Lx_Wx_Hx);
1252/** Opcode VEX.66.0F3A 0x79 (AMD tables only). */
1253FNIEMOP_STUB(iemOp_vfnmaddpd_Vx_Lx_Wx_Hx);
1254/** Opcode VEX.66.0F3A 0x7a (AMD tables only). */
1255FNIEMOP_STUB(iemOp_vfnmaddss_Vx_Lx_Wx_Hx);
1256/** Opcode VEX.66.0F3A 0x7b (AMD tables only). */
1257FNIEMOP_STUB(iemOp_vfnmaddsd_Vx_Lx_Wx_Hx);
1258/** Opcode VEX.66.0F3A 0x7c (AMD tables only). */
1259FNIEMOP_STUB(iemOp_vfnmsubps_Vx_Lx_Wx_Hx);
1260/** Opcode VEX.66.0F3A 0x7d (AMD tables only). */
1261FNIEMOP_STUB(iemOp_vfnmsubpd_Vx_Lx_Wx_Hx);
1262/** Opcode VEX.66.0F3A 0x7e (AMD tables only). */
1263FNIEMOP_STUB(iemOp_vfnmsubss_Vx_Lx_Wx_Hx);
1264/** Opcode VEX.66.0F3A 0x7f (AMD tables only). */
1265FNIEMOP_STUB(iemOp_vfnmsubsd_Vx_Lx_Wx_Hx);
1266
1267/* Opcodes VEX.66.0F3A 0x80 thru 0xbf are unused. */
1268
1269
1270/* Opcode VEX.66.0F3A 0xc0 - invalid */
1271/* Opcode VEX.66.0F3A 0xc1 - invalid */
1272/* Opcode VEX.66.0F3A 0xc2 - invalid */
1273/* Opcode VEX.66.0F3A 0xc3 - invalid */
1274/* Opcode VEX.66.0F3A 0xc4 - invalid */
1275/* Opcode VEX.66.0F3A 0xc5 - invalid */
1276/* Opcode VEX.66.0F3A 0xc6 - invalid */
1277/* Opcode VEX.66.0F3A 0xc7 - invalid */
1278/* Opcode VEX.66.0F3A 0xc8 - invalid */
1279/* Opcode VEX.66.0F3A 0xc9 - invalid */
1280/* Opcode VEX.66.0F3A 0xca - invalid */
1281/* Opcode VEX.66.0F3A 0xcb - invalid */
1282/* Opcode VEX.66.0F3A 0xcc - invalid */
1283/* Opcode VEX.66.0F3A 0xcd - invalid */
1284/* Opcode VEX.66.0F3A 0xce - invalid */
1285/* Opcode VEX.66.0F3A 0xcf - invalid */
1286
1287
1288/* Opcode VEX.66.0F3A 0xd0 - invalid */
1289/* Opcode VEX.66.0F3A 0xd1 - invalid */
1290/* Opcode VEX.66.0F3A 0xd2 - invalid */
1291/* Opcode VEX.66.0F3A 0xd3 - invalid */
1292/* Opcode VEX.66.0F3A 0xd4 - invalid */
1293/* Opcode VEX.66.0F3A 0xd5 - invalid */
1294/* Opcode VEX.66.0F3A 0xd6 - invalid */
1295/* Opcode VEX.66.0F3A 0xd7 - invalid */
1296/* Opcode VEX.66.0F3A 0xd8 - invalid */
1297/* Opcode VEX.66.0F3A 0xd9 - invalid */
1298/* Opcode VEX.66.0F3A 0xda - invalid */
1299/* Opcode VEX.66.0F3A 0xdb - invalid */
1300/* Opcode VEX.66.0F3A 0xdc - invalid */
1301/* Opcode VEX.66.0F3A 0xdd - invalid */
1302/* Opcode VEX.66.0F3A 0xde - invalid */
1303/* Opcode VEX.66.0F3A 0xdf - (aeskeygenassist). */
1304FNIEMOP_STUB(iemOp_vaeskeygen_Vdq_Wdq_Ib);
1305
1306
1307/**
1308 * @opcode 0xf0
1309 * @oppfx 0xf2
1310 * @opflclass unchanged
1311 */
1312FNIEMOP_DEF(iemOp_rorx_Gy_Ey_Ib)
1313{
1314 IEMOP_MNEMONIC3(VEX_RMI, RORX, rorx, Gy, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO);
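    /* RORX (BMI2) rotates the source right by the imm8 count and writes the result to
       the destination GPR without updating EFLAGS - hence @opflclass unchanged above
       and the absence of any flag fetch/store in the blocks below. */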
1315 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1316 if (IEM_IS_MODRM_REG_MODE(bRm))
1317 {
1318 /*
1319 * Register, register.
1320 */
1321 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1322 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1323 {
1324 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
1325 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1326 IEM_MC_ARG(uint64_t *, pDst, 0);
1327 IEM_MC_ARG(uint64_t, uSrc1, 1);
1328 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
1329 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
1330 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1331 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
1332 IEM_MC_ADVANCE_RIP_AND_FINISH();
1333 IEM_MC_END();
1334 }
1335 else
1336 {
1337 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
1338 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1339 IEM_MC_ARG(uint32_t *, pDst, 0);
1340 IEM_MC_ARG(uint32_t, uSrc1, 1);
1341 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
1342 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
1343 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1344 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
1345 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
1346 IEM_MC_ADVANCE_RIP_AND_FINISH();
1347 IEM_MC_END();
1348 }
1349 }
1350 else
1351 {
1352 /*
1353 * Register, memory.
1354 */
1355 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1356 {
1357 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
1358 IEM_MC_ARG(uint64_t *, pDst, 0);
1359 IEM_MC_ARG(uint64_t, uSrc1, 1);
1360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1362 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1363 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
1364 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1365 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1366 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1367 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
1368 IEM_MC_ADVANCE_RIP_AND_FINISH();
1369 IEM_MC_END();
1370 }
1371 else
1372 {
1373 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
1374 IEM_MC_ARG(uint32_t *, pDst, 0);
1375 IEM_MC_ARG(uint32_t, uSrc1, 1);
1376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1377 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1378 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1379 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
1380 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1381 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1382 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1383 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
1384 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
1385 IEM_MC_ADVANCE_RIP_AND_FINISH();
1386 IEM_MC_END();
1387 }
1388 }
1389}
1390
1391
1392/**
1393 * VEX opcode map \#3.
1394 *
1395 * @sa g_apfnThreeByte0f3a
1396 */
1397const PFNIEMOP g_apfnVexMap3[] =
1398{
1399 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
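    /* Each opcode byte gets four entries, selected by the mandatory SIMD prefix
       (none, 0x66, 0xF3, 0xF2); IEMOP_X4 effectively repeats one handler across all
       four columns for rows where every prefix combination is invalid. */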
1400 /* 0x00 */ iemOp_InvalidNeedRMImm8, iemOp_vpermq_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1401 /* 0x01 */ iemOp_InvalidNeedRMImm8, iemOp_vpermqd_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1402 /* 0x02 */ iemOp_InvalidNeedRMImm8, iemOp_vpblendd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1403 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1404 /* 0x04 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1405 /* 0x05 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1406 /* 0x06 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1407 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1408 /* 0x08 */ iemOp_InvalidNeedRMImm8, iemOp_vroundps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1409 /* 0x09 */ iemOp_InvalidNeedRMImm8, iemOp_vroundpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1410 /* 0x0a */ iemOp_InvalidNeedRMImm8, iemOp_vroundss_Vss_Wss_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1411 /* 0x0b */ iemOp_InvalidNeedRMImm8, iemOp_vroundsd_Vsd_Wsd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1412 /* 0x0c */ iemOp_InvalidNeedRMImm8, iemOp_vblendps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1413 /* 0x0d */ iemOp_InvalidNeedRMImm8, iemOp_vblendpd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1414 /* 0x0e */ iemOp_InvalidNeedRMImm8, iemOp_vpblendw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1415 /* 0x0f */ iemOp_InvalidNeedRMImm8, iemOp_vpalignr_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1416
1417 /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1418 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1419 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1420 /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1421 /* 0x14 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrb_RdMb_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1422 /* 0x15 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrw_Ew_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1423 /* 0x16 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrd_q_RdMw_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1424 /* 0x17 */ iemOp_InvalidNeedRMImm8, iemOp_vextractps_Ed_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1425 /* 0x18 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1426 /* 0x19 */ iemOp_InvalidNeedRMImm8, iemOp_vextractf128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1427 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1428 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1429 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1430 /* 0x1d */ iemOp_InvalidNeedRMImm8, iemOp_vcvtps2ph_Wx_Vx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1431 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1432 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1433
1434 /* 0x20 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1435 /* 0x21 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1436 /* 0x22 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1437 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1438 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1439 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1440 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1441 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1442 /* 0x28 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1443 /* 0x29 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1444 /* 0x2a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1445 /* 0x2b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1446 /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1447 /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1448 /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1449 /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1450
1451 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1452 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1453 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1454 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1455 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1456 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1457 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1458 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1459 /* 0x38 */ iemOp_InvalidNeedRMImm8, iemOp_vinserti128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1460 /* 0x39 */ iemOp_InvalidNeedRMImm8, iemOp_vextracti128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1461 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1462 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1463 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1464 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1465 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1466 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1467
1468 /* 0x40 */ iemOp_InvalidNeedRMImm8, iemOp_vdpps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1469 /* 0x41 */ iemOp_InvalidNeedRMImm8, iemOp_vdppd_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1470 /* 0x42 */ iemOp_InvalidNeedRMImm8, iemOp_vmpsadbw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1471 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1472 /* 0x44 */ iemOp_InvalidNeedRMImm8, iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1473 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1474 /* 0x46 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1475 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1476 /* 0x48 */ iemOp_InvalidNeedRMImm8, iemOp_vperlmilzz2ps_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1477 /* 0x49 */ iemOp_InvalidNeedRMImm8, iemOp_vperlmilzz2pd_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1478 /* 0x4a */ iemOp_InvalidNeedRMImm8, iemOp_vblendvps_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1479 /* 0x4b */ iemOp_InvalidNeedRMImm8, iemOp_vblendvpd_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1480 /* 0x4c */ iemOp_InvalidNeedRMImm8, iemOp_vpblendvb_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1481 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1482 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1483 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1484
1485 /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1486 /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1487 /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1488 /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1489 /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1490 /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1491 /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1492 /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1493 /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1494 /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1495 /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1496 /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1497 /* 0x5c */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1498 /* 0x5d */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1499 /* 0x5e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1500 /* 0x5f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1501
1502 /* 0x60 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1503 /* 0x61 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1504 /* 0x62 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1505 /* 0x63 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1506 /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1507 /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1508 /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1509 /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1510 /* 0x68 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1511 /* 0x69 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1512 /* 0x6a */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1513 /* 0x6b */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1514 /* 0x6c */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1515 /* 0x6d */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1516 /* 0x6e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1517 /* 0x6f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1518
1519 /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1520 /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1521 /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1522 /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1523 /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1524 /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1525 /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1526 /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1527 /* 0x78 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1528 /* 0x79 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1529 /* 0x7a */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1530 /* 0x7b */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1531 /* 0x7c */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1532 /* 0x7d */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1533 /* 0x7e */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1534 /* 0x7f */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1535
1536 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1537 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1538 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1539 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1540 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1541 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1542 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1543 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1544 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1545 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1546 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1547 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1548 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1549 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1550 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1551 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1552
1553 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1554 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1555 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1556 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1557 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1558 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1559 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1560 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1561 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1562 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1563 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1564 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1565 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1566 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1567 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1568 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1569
1570 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1571 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1572 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1573 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1574 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1575 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1576 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1577 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1578 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1579 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1580 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1581 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1582 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1583 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1584 /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1585 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1586
1587 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1588 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1589 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1590 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1591 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1592 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1593 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1594 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1595 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1596 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1597 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1598 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1599 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1600 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1601 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1602 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1603
1604 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1605 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1606 /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1607 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1608 /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1609 /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1610 /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1611 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1612 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1613 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1614 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1615 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1616 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1617 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1618 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1619 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1620
1621 /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1622 /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1623 /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1624 /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1625 /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1626 /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1627 /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1628 /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1629 /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1630 /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1631 /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1632 /* 0xdb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1633 /* 0xdc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1634 /* 0xdd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1635 /* 0xde */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1636 /* 0xdf */ iemOp_vaeskeygen_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1637
1638 /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1639 /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1640 /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1641 /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1642 /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1643 /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1644 /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1645 /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1646 /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1647 /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1648 /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1649 /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1650 /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1651 /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1652 /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1653 /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1654
1655 /* 0xf0 */ iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_rorx_Gy_Ey_Ib,
1656 /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1657 /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1658 /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1659 /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1660 /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1661 /* 0xf6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1662 /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1663 /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1664 /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1665 /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1666 /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1667 /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1668 /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1669 /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1670 /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1671};
1672AssertCompile(RT_ELEMENTS(g_apfnVexMap3) == 1024);
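/*
 * Illustrative note, not part of the upstream source: the table above carries four
 * entries per opcode byte, one per mandatory-prefix column in the order no prefix,
 * 0x66, 0xF3, 0xF2, which is why the element count asserts to 256 * 4 = 1024.  The
 * IEMOP_X4() wrapper simply repeats the same handler across all four columns for
 * opcodes that are invalid regardless of prefix.  A dispatcher would typically pick
 * the handler along the lines of
 *
 *     g_apfnVexMap3[((uint16_t)bOpcode << 2) + idxPrefix]
 *
 * with idxPrefix being 0 (none), 1 (0x66), 2 (0xF3) or 3 (0xF2); the exact lookup
 * macro used by the IEM decoder may differ.
 */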
1673
1674/** @} */
1675