VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap3.cpp.h@ 103778

Last change on this file since 103778 was 103700, checked in by vboxsync, 9 months ago

VMM/IEM: Implement vpblendd instruction dispatch & emulation, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 71.5 KB
Line 
1/* $Id: IEMAllInstVexMap3.cpp.h 103700 2024-03-06 13:32:01Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation, 0x0f 0x3a map.
4 *
5 * @remarks IEMAllInstThree0f3a.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 3
33 * @{
34 */
35
36/**
37 * Common worker for AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128, imm8
39 * - vpxxx ymm0, ymm1, ymm2/mem256, imm8
40 *
41 * Takes function table for function w/o implicit state parameter.
42 *
43 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
44 */
45FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
46{
47 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
48 if (IEM_IS_MODRM_REG_MODE(bRm))
49 {
50 /*
51 * Register, register.
52 */
53 if (pVCpu->iem.s.uVexLength)
54 {
55 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
56 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
57 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
58 IEM_MC_LOCAL(RTUINT256U, uDst);
59 IEM_MC_LOCAL(RTUINT256U, uSrc1);
60 IEM_MC_LOCAL(RTUINT256U, uSrc2);
61 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
62 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
63 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
64 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
65 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
66 IEM_MC_PREPARE_AVX_USAGE();
67 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
68 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
69 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
70 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
71 IEM_MC_ADVANCE_RIP_AND_FINISH();
72 IEM_MC_END();
73 }
74 else
75 {
76 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
77 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
78 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
79 IEM_MC_ARG(PRTUINT128U, puDst, 0);
80 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
81 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
82 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
83 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
84 IEM_MC_PREPARE_AVX_USAGE();
85 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
86 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
87 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
88 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
89 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
90 IEM_MC_ADVANCE_RIP_AND_FINISH();
91 IEM_MC_END();
92 }
93 }
94 else
95 {
96 /*
97 * Register, memory.
98 */
99 if (pVCpu->iem.s.uVexLength)
100 {
101 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
102 IEM_MC_LOCAL(RTUINT256U, uDst);
103 IEM_MC_LOCAL(RTUINT256U, uSrc1);
104 IEM_MC_LOCAL(RTUINT256U, uSrc2);
105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
106 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
107 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
108 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
109
110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
111 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
112 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
113 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
114 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
115 IEM_MC_PREPARE_AVX_USAGE();
116
117 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
118 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
119 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
120 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
121
122 IEM_MC_ADVANCE_RIP_AND_FINISH();
123 IEM_MC_END();
124 }
125 else
126 {
127 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
128 IEM_MC_LOCAL(RTUINT128U, uSrc2);
129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
130 IEM_MC_ARG(PRTUINT128U, puDst, 0);
131 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
132 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
133
134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
135 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
136 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
137 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
138 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
139 IEM_MC_PREPARE_AVX_USAGE();
140
141 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
142 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
143 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
144 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
145 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
146
147 IEM_MC_ADVANCE_RIP_AND_FINISH();
148 IEM_MC_END();
149 }
150 }
151}
152
153
154/**
155 * Common worker for AVX instructions on the forms:
156 * - vpermilps/d xmm0, xmm1/mem128, imm8
157 * - vpermilps/d ymm0, ymm1/mem256, imm8
158 *
159 * Takes function table for function w/o implicit state parameter.
160 *
161 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
162 */
163FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF2IMM8, pImpl)
164{
165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
166 if (IEM_IS_MODRM_REG_MODE(bRm))
167 {
168 /*
169 * Register, register.
170 */
171 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
172 if (pVCpu->iem.s.uVexLength)
173 {
174 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
175 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
176 IEM_MC_LOCAL(RTUINT256U, uDst);
177 IEM_MC_LOCAL(RTUINT256U, uSrc);
178 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
179 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
180 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
181 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
182 IEM_MC_PREPARE_AVX_USAGE();
183 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
184 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
185 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
186 IEM_MC_ADVANCE_RIP_AND_FINISH();
187 IEM_MC_END();
188 }
189 else
190 {
191 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
192 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
193 IEM_MC_ARG(PRTUINT128U, puDst, 0);
194 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
195 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
196 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
197 IEM_MC_PREPARE_AVX_USAGE();
198 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
199 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
200 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
201 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
202 IEM_MC_ADVANCE_RIP_AND_FINISH();
203 IEM_MC_END();
204 }
205 }
206 else
207 {
208 /*
209 * Register, memory.
210 */
211 if (pVCpu->iem.s.uVexLength)
212 {
213 IEM_MC_BEGIN(3, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
214 IEM_MC_LOCAL(RTUINT256U, uDst);
215 IEM_MC_LOCAL(RTUINT256U, uSrc);
216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
217 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
218 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
219
220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
221 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
222 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
223 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
224 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
225 IEM_MC_PREPARE_AVX_USAGE();
226
227 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
228 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
229 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
230
231 IEM_MC_ADVANCE_RIP_AND_FINISH();
232 IEM_MC_END();
233 }
234 else
235 {
236 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
237 IEM_MC_LOCAL(RTUINT128U, uSrc);
238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
239 IEM_MC_ARG(PRTUINT128U, puDst, 0);
240 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
241
242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
243 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
244 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
245 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
246 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
247 IEM_MC_PREPARE_AVX_USAGE();
248
249 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
250 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
251 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
252 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
253
254 IEM_MC_ADVANCE_RIP_AND_FINISH();
255 IEM_MC_END();
256 }
257 }
258}
259
260
261/**
262 * Common worker for AVX instructions on the forms:
263 * - vblendps/d xmm0, xmm1, xmm2/mem128, imm8
264 * - vblendps/d ymm0, ymm1, ymm2/mem256, imm8
265 *
266 * Takes function table for function w/o implicit state parameter.
267 *
268 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
269 */
270FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
271{
272 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
273 if (IEM_IS_MODRM_REG_MODE(bRm))
274 {
275 /*
276 * Register, register.
277 */
278 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
279 if (pVCpu->iem.s.uVexLength)
280 {
281 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
282 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
283 IEM_MC_LOCAL(RTUINT256U, uDst);
284 IEM_MC_LOCAL(RTUINT256U, uSrc1);
285 IEM_MC_LOCAL(RTUINT256U, uSrc2);
286 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
287 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
288 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
289 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
290 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
291 IEM_MC_PREPARE_AVX_USAGE();
292 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
293 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
294 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
295 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
296 IEM_MC_ADVANCE_RIP_AND_FINISH();
297 IEM_MC_END();
298 }
299 else
300 {
301 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
302 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
303 IEM_MC_ARG(PRTUINT128U, puDst, 0);
304 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
305 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
306 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
307 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
308 IEM_MC_PREPARE_AVX_USAGE();
309 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
310 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
311 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
312 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
313 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
314 IEM_MC_ADVANCE_RIP_AND_FINISH();
315 IEM_MC_END();
316 }
317 }
318 else
319 {
320 /*
321 * Register, memory.
322 */
323 if (pVCpu->iem.s.uVexLength)
324 {
325 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
326 IEM_MC_LOCAL(RTUINT256U, uDst);
327 IEM_MC_LOCAL(RTUINT256U, uSrc1);
328 IEM_MC_LOCAL(RTUINT256U, uSrc2);
329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
330 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
331 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
332 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
333
334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
335 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
336 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
337 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
338 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
339 IEM_MC_PREPARE_AVX_USAGE();
340
341 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
342 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
343 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
344 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
345
346 IEM_MC_ADVANCE_RIP_AND_FINISH();
347 IEM_MC_END();
348 }
349 else
350 {
351 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
352 IEM_MC_LOCAL(RTUINT128U, uSrc2);
353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
354 IEM_MC_ARG(PRTUINT128U, puDst, 0);
355 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
356 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
357
358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
359 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
360 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
361 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
362 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
363 IEM_MC_PREPARE_AVX_USAGE();
364
365 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
366 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
367 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
368 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
369 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
370
371 IEM_MC_ADVANCE_RIP_AND_FINISH();
372 IEM_MC_END();
373 }
374 }
375}
376
377
378/** Opcode VEX.66.0F3A 0x00. */
379FNIEMOP_STUB(iemOp_vpermq_Vqq_Wqq_Ib);
380/** Opcode VEX.66.0F3A 0x01. */
381FNIEMOP_STUB(iemOp_vpermqd_Vqq_Wqq_Ib);
382
383
384/** Opcode VEX.66.0F3A 0x02.
385 * AVX2,AVX2 */
386FNIEMOP_DEF(iemOp_vpblendd_Vx_Hx_Wx_Ib)
387{
388 IEMOP_MNEMONIC3(VEX_RVM, VPBLENDD, vpblendd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
389 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendd);
390 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
391}
392
393
394/* Opcode VEX.66.0F3A 0x03 - invalid */
395
396
397/** Opcode VEX.66.0F3A 0x04.
398 * AVX,AVX */
399FNIEMOP_DEF(iemOp_vpermilps_Vx_Wx_Ib)
400{
401 IEMOP_MNEMONIC3(VEX_RMI, VPERMILPS, vpermilps, Vx, Wx, Ib, DISOPTYPE_HARMLESS, 0); /* @todo */
402 IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilps);
403 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
404}
405
406
407/** Opcode VEX.66.0F3A 0x05.
408 * AVX,AVX */
409FNIEMOP_DEF(iemOp_vpermilpd_Vx_Wx_Ib)
410{
411 IEMOP_MNEMONIC3(VEX_RMI, VPERMILPD, vpermilpd, Vx, Wx, Ib, DISOPTYPE_HARMLESS, 0); /* @todo */
412 IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilpd);
413 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
414}
415
416
417/** Opcode VEX.66.0F3A 0x06 (vex only) */
418FNIEMOP_DEF(iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib)
419{
420 //IEMOP_MNEMONIC4(VEX_RVM, VPERM2F128, vperm2f128, Vqq, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0); /** @todo */
421
422 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
423 if (IEM_IS_MODRM_REG_MODE(bRm))
424 {
425 /*
426 * Register, register.
427 */
428 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
429 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
430 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
431 IEM_MC_LOCAL(RTUINT256U, uDst);
432 IEM_MC_LOCAL(RTUINT256U, uSrc1);
433 IEM_MC_LOCAL(RTUINT256U, uSrc2);
434 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
435 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
436 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
437 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
438 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
439 IEM_MC_PREPARE_AVX_USAGE();
440 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
441 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
442 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
443 puDst, puSrc1, puSrc2, bImmArg);
444 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
445 IEM_MC_ADVANCE_RIP_AND_FINISH();
446 IEM_MC_END();
447 }
448 else
449 {
450 /*
451 * Register, memory.
452 */
453 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
454 IEM_MC_LOCAL(RTUINT256U, uDst);
455 IEM_MC_LOCAL(RTUINT256U, uSrc1);
456 IEM_MC_LOCAL(RTUINT256U, uSrc2);
457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
458 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
459 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
460 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
461
462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
463 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
464 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
465 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
466 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
467 IEM_MC_PREPARE_AVX_USAGE();
468
469 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
470 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
471 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
472 puDst, puSrc1, puSrc2, bImmArg);
473 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
474
475 IEM_MC_ADVANCE_RIP_AND_FINISH();
476 IEM_MC_END();
477 }
478}
479
480
481/* Opcode VEX.66.0F3A 0x07 - invalid */
482/** Opcode VEX.66.0F3A 0x08. */
483FNIEMOP_STUB(iemOp_vroundps_Vx_Wx_Ib);
484/** Opcode VEX.66.0F3A 0x09. */
485FNIEMOP_STUB(iemOp_vroundpd_Vx_Wx_Ib);
486/** Opcode VEX.66.0F3A 0x0a. */
487FNIEMOP_STUB(iemOp_vroundss_Vss_Wss_Ib);
488/** Opcode VEX.66.0F3A 0x0b. */
489FNIEMOP_STUB(iemOp_vroundsd_Vsd_Wsd_Ib);
490
491
492/** Opcode VEX.66.0F3A 0x0c.
493 * AVX,AVX */
494FNIEMOP_DEF(iemOp_vblendps_Vx_Hx_Wx_Ib)
495{
496 IEMOP_MNEMONIC3(VEX_RVM, VBLENDPS, vblendps, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
497 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendps);
498 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
499}
500
501
502/** Opcode VEX.66.0F3A 0x0d.
503 * AVX,AVX */
504FNIEMOP_DEF(iemOp_vblendpd_Vx_Hx_Wx_Ib)
505{
506 IEMOP_MNEMONIC3(VEX_RVM, VBLENDPD, vblendpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
507 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendpd);
508 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
509}
510
511
512/** Opcode VEX.66.0F3A 0x0e.
513 * AVX,AVX2 */
514FNIEMOP_DEF(iemOp_vpblendw_Vx_Hx_Wx_Ib)
515{
516 IEMOP_MNEMONIC3(VEX_RVM, VPBLENDW, vpblendw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
517 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendw);
518 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
519}
520
521
522/** Opcode VEX.0F3A 0x0f - invalid. */
523
524
525/** Opcode VEX.66.0F3A 0x0f.
526 * AVX,AVX2 */
527FNIEMOP_DEF(iemOp_vpalignr_Vx_Hx_Wx_Ib)
528{
529 IEMOP_MNEMONIC3(VEX_RVM, VPALIGNR, vpalignr, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
530 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpalignr);
531 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
532}
533
534
535/* Opcode VEX.66.0F3A 0x10 - invalid */
536/* Opcode VEX.66.0F3A 0x11 - invalid */
537/* Opcode VEX.66.0F3A 0x12 - invalid */
538/* Opcode VEX.66.0F3A 0x13 - invalid */
539/** Opcode VEX.66.0F3A 0x14. */
540FNIEMOP_STUB(iemOp_vpextrb_RdMb_Vdq_Ib);
541/** Opcode VEX.66.0F3A 0x15. */
542FNIEMOP_STUB(iemOp_vpextrw_RdMw_Vdq_Ib);
543/** Opcode VEX.66.0F3A 0x16. */
544FNIEMOP_STUB(iemOp_vpextrd_q_RdMw_Vdq_Ib);
545/** Opcode VEX.66.0F3A 0x17. */
546FNIEMOP_STUB(iemOp_vextractps_Ed_Vdq_Ib);
547
548
549/** Opcode VEX.66.0F3A 0x18 (vex only). */
550FNIEMOP_DEF(iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib)
551{
552 //IEMOP_MNEMONIC4(VEX_RMI, VINSERTF128, vinsertf128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
553 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
554 if (IEM_IS_MODRM_REG_MODE(bRm))
555 {
556 /*
557 * Register, register.
558 */
559 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
560 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
561 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
562 IEM_MC_LOCAL(RTUINT128U, uSrc);
563
564 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
565 IEM_MC_PREPARE_AVX_USAGE();
566
567 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
568 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
569 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
570
571 IEM_MC_ADVANCE_RIP_AND_FINISH();
572 IEM_MC_END();
573 }
574 else
575 {
576 /*
577 * Register, memory.
578 */
579 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
580 IEM_MC_LOCAL(RTUINT128U, uSrc);
581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
582
583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
584 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
585 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
586 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
587 IEM_MC_PREPARE_AVX_USAGE();
588
589 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
590 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
591 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
592
593 IEM_MC_ADVANCE_RIP_AND_FINISH();
594 IEM_MC_END();
595 }
596}
597
598
599/** Opcode VEX.66.0F3A 0x19 (vex only). */
600FNIEMOP_STUB(iemOp_vextractf128_Wdq_Vqq_Ib);
601/* Opcode VEX.66.0F3A 0x1a - invalid */
602/* Opcode VEX.66.0F3A 0x1b - invalid */
603/* Opcode VEX.66.0F3A 0x1c - invalid */
604/** Opcode VEX.66.0F3A 0x1d (vex only). */
605FNIEMOP_STUB(iemOp_vcvtps2ph_Wx_Vx_Ib);
606/* Opcode VEX.66.0F3A 0x1e - invalid */
607/* Opcode VEX.66.0F3A 0x1f - invalid */
608
609
610/** Opcode VEX.66.0F3A 0x20. */
611FNIEMOP_STUB(iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib);
612/** Opcode VEX.66.0F3A 0x21, */
613FNIEMOP_STUB(iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib);
614/** Opcode VEX.66.0F3A 0x22. */
615FNIEMOP_STUB(iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib);
616/* Opcode VEX.66.0F3A 0x23 - invalid */
617/* Opcode VEX.66.0F3A 0x24 - invalid */
618/* Opcode VEX.66.0F3A 0x25 - invalid */
619/* Opcode VEX.66.0F3A 0x26 - invalid */
620/* Opcode VEX.66.0F3A 0x27 - invalid */
621/* Opcode VEX.66.0F3A 0x28 - invalid */
622/* Opcode VEX.66.0F3A 0x29 - invalid */
623/* Opcode VEX.66.0F3A 0x2a - invalid */
624/* Opcode VEX.66.0F3A 0x2b - invalid */
625/* Opcode VEX.66.0F3A 0x2c - invalid */
626/* Opcode VEX.66.0F3A 0x2d - invalid */
627/* Opcode VEX.66.0F3A 0x2e - invalid */
628/* Opcode VEX.66.0F3A 0x2f - invalid */
629
630
631/* Opcode VEX.66.0F3A 0x30 - invalid */
632/* Opcode VEX.66.0F3A 0x31 - invalid */
633/* Opcode VEX.66.0F3A 0x32 - invalid */
634/* Opcode VEX.66.0F3A 0x33 - invalid */
635/* Opcode VEX.66.0F3A 0x34 - invalid */
636/* Opcode VEX.66.0F3A 0x35 - invalid */
637/* Opcode VEX.66.0F3A 0x36 - invalid */
638/* Opcode VEX.66.0F3A 0x37 - invalid */
639
640
641/** Opcode VEX.66.0F3A 0x38 (vex only). */
642FNIEMOP_DEF(iemOp_vinserti128_Vqq_Hqq_Wqq_Ib)
643{
644 //IEMOP_MNEMONIC4(VEX_RMI, VINSERTI128, vinserti128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
645 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
646 if (IEM_IS_MODRM_REG_MODE(bRm))
647 {
648 /*
649 * Register, register.
650 */
651 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
652 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
653 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
654 IEM_MC_LOCAL(RTUINT128U, uSrc);
655
656 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
657 IEM_MC_PREPARE_AVX_USAGE();
658
659 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
660 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
661 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
662
663 IEM_MC_ADVANCE_RIP_AND_FINISH();
664 IEM_MC_END();
665 }
666 else
667 {
668 /*
669 * Register, memory.
670 */
671 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
672 IEM_MC_LOCAL(RTUINT128U, uSrc);
673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
674
675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
676 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
677 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
678 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
679 IEM_MC_PREPARE_AVX_USAGE();
680
681 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
682 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
683 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
684
685 IEM_MC_ADVANCE_RIP_AND_FINISH();
686 IEM_MC_END();
687 }
688}
689
690
691/** Opcode VEX.66.0F3A 0x39 (vex only). */
692FNIEMOP_STUB(iemOp_vextracti128_Wdq_Vqq_Ib);
693/* Opcode VEX.66.0F3A 0x3a - invalid */
694/* Opcode VEX.66.0F3A 0x3b - invalid */
695/* Opcode VEX.66.0F3A 0x3c - invalid */
696/* Opcode VEX.66.0F3A 0x3d - invalid */
697/* Opcode VEX.66.0F3A 0x3e - invalid */
698/* Opcode VEX.66.0F3A 0x3f - invalid */
699
700
701/** Opcode VEX.66.0F3A 0x40. */
702FNIEMOP_STUB(iemOp_vdpps_Vx_Hx_Wx_Ib);
703/** Opcode VEX.66.0F3A 0x41, */
704FNIEMOP_STUB(iemOp_vdppd_Vdq_Hdq_Wdq_Ib);
705
706
707/** Opcode VEX.66.0F3A 0x42. */
708FNIEMOP_DEF(iemOp_vmpsadbw_Vx_Hx_Wx_Ib)
709{
710 IEMOP_MNEMONIC3(VEX_RVM, VMPSADBW, vmpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /** @todo */
711 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vmpsadbw);
712 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
713}
714
715
716/* Opcode VEX.66.0F3A 0x43 - invalid */
717
718
719/** Opcode VEX.66.0F3A 0x44. */
720FNIEMOP_DEF(iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib)
721{
722 //IEMOP_MNEMONIC3(VEX_RVM, VPCLMULQDQ, vpclmulqdq, Vdq, Hdq, Wdq, DISOPTYPE_HARMLESS, 0); /* @todo */
723
724 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
725 if (IEM_IS_MODRM_REG_MODE(bRm))
726 {
727 /*
728 * Register, register.
729 */
730 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
731 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
732 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
733 IEM_MC_ARG(PRTUINT128U, puDst, 0);
734 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
735 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
736 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
737 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
738 IEM_MC_PREPARE_AVX_USAGE();
739 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
740 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
741 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
742 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
743 puDst, puSrc1, puSrc2, bImmArg);
744 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
745 IEM_MC_ADVANCE_RIP_AND_FINISH();
746 IEM_MC_END();
747 }
748 else
749 {
750 /*
751 * Register, memory.
752 */
753 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
754 IEM_MC_LOCAL(RTUINT128U, uSrc2);
755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
756 IEM_MC_ARG(PRTUINT128U, puDst, 0);
757 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
758 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
759
760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
761 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
762 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
763 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
764 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
765 IEM_MC_PREPARE_AVX_USAGE();
766
767 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
768 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
769 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
770 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
771 puDst, puSrc1, puSrc2, bImmArg);
772 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
773
774 IEM_MC_ADVANCE_RIP_AND_FINISH();
775 IEM_MC_END();
776 }
777}
778
779
780/* Opcode VEX.66.0F3A 0x45 - invalid */
781
782
783/** Opcode VEX.66.0F3A 0x46 (vex only) */
784FNIEMOP_DEF(iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib)
785{
786 //IEMOP_MNEMONIC4(VEX_RVM, VPERM2I128, vperm2i128, Vqq, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0); /** @todo */
787
788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
789 if (IEM_IS_MODRM_REG_MODE(bRm))
790 {
791 /*
792 * Register, register.
793 */
794 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
795 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
796 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
797 IEM_MC_LOCAL(RTUINT256U, uDst);
798 IEM_MC_LOCAL(RTUINT256U, uSrc1);
799 IEM_MC_LOCAL(RTUINT256U, uSrc2);
800 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
801 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
802 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
803 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
804 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
805 IEM_MC_PREPARE_AVX_USAGE();
806 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
807 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
808 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
809 puDst, puSrc1, puSrc2, bImmArg);
810 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
811 IEM_MC_ADVANCE_RIP_AND_FINISH();
812 IEM_MC_END();
813 }
814 else
815 {
816 /*
817 * Register, memory.
818 */
819 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
820 IEM_MC_LOCAL(RTUINT256U, uDst);
821 IEM_MC_LOCAL(RTUINT256U, uSrc1);
822 IEM_MC_LOCAL(RTUINT256U, uSrc2);
823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
824 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
825 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
826 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
827
828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
829 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
830 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
831 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
832 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
833 IEM_MC_PREPARE_AVX_USAGE();
834
835 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
836 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
837 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
838 puDst, puSrc1, puSrc2, bImmArg);
839 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
840
841 IEM_MC_ADVANCE_RIP_AND_FINISH();
842 IEM_MC_END();
843 }
844}
845
846
847/* Opcode VEX.66.0F3A 0x47 - invalid */
848/** Opcode VEX.66.0F3A 0x48 (AMD tables only). */
849FNIEMOP_STUB(iemOp_vperlmilzz2ps_Vx_Hx_Wp_Lx);
850/** Opcode VEX.66.0F3A 0x49 (AMD tables only). */
851FNIEMOP_STUB(iemOp_vperlmilzz2pd_Vx_Hx_Wp_Lx);
852
853
854/**
855 * Common worker for AVX2 instructions on the forms:
856 * - vblendvps/d xmm0, xmm1, xmm2/mem128, xmm4
857 * - vblendvps/d ymm0, ymm1, ymm2/mem256, ymm4
858 *
859 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operations.
860 */
861FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
862{
863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
864 if (IEM_IS_MODRM_REG_MODE(bRm))
865 {
866 /*
867 * Register, register.
868 */
869 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
870 if (pVCpu->iem.s.uVexLength)
871 {
872 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
873 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
874 IEM_MC_LOCAL(RTUINT256U, uDst);
875 IEM_MC_LOCAL(RTUINT256U, uSrc1);
876 IEM_MC_LOCAL(RTUINT256U, uSrc2);
877 IEM_MC_LOCAL(RTUINT256U, uSrc3);
878 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
879 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
880 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
881 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
882 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
883 IEM_MC_PREPARE_AVX_USAGE();
884 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
885 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
886 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
887 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
888 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
889 IEM_MC_ADVANCE_RIP_AND_FINISH();
890 IEM_MC_END();
891 }
892 else
893 {
894 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
895 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
896 IEM_MC_ARG(PRTUINT128U, puDst, 0);
897 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
898 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
899 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
900 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
901 IEM_MC_PREPARE_AVX_USAGE();
902 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
903 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
904 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
905 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
906 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
907 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
908 IEM_MC_ADVANCE_RIP_AND_FINISH();
909 IEM_MC_END();
910 }
911 }
912 else
913 {
914 /*
915 * Register, memory.
916 */
917 if (pVCpu->iem.s.uVexLength)
918 {
919 IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER, 0);
920 IEM_MC_LOCAL(RTUINT256U, uDst);
921 IEM_MC_LOCAL(RTUINT256U, uSrc1);
922 IEM_MC_LOCAL(RTUINT256U, uSrc2);
923 IEM_MC_LOCAL(RTUINT256U, uSrc3);
924 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
925 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
926 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
927 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
928 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
929
930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
931 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
932
933 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
934 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
935 IEM_MC_PREPARE_AVX_USAGE();
936
937 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
938 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
939 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_EFFECTIVE_VVVV(pVCpu));
940 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
941 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
942 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
943
944 IEM_MC_ADVANCE_RIP_AND_FINISH();
945 IEM_MC_END();
946 }
947 else
948 {
949 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
950 IEM_MC_LOCAL(RTUINT128U, uSrc2);
951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
952 IEM_MC_ARG(PRTUINT128U, puDst, 0);
953 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
954 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
955 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
956
957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
958 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
959
960 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
961 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
962 IEM_MC_PREPARE_AVX_USAGE();
963
964 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
965 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
966 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
967 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
968 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
969 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
970
971 IEM_MC_ADVANCE_RIP_AND_FINISH();
972 IEM_MC_END();
973 }
974 }
975}
976
977
978/** Opcode VEX.66.0F3A 0x4a (vex only).
979 * AVX, AVX */
980FNIEMOP_DEF(iemOp_vblendvps_Vx_Hx_Wx_Lx)
981{
982 //IEMOP_MNEMONIC4(VEX_RVM, VBLENDVPS, vpblendvps, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
983 IEMOPBLENDOP_INIT_VARS(vblendvps);
984 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
985}
986
987
988/** Opcode VEX.66.0F3A 0x4b (vex only).
989 * AVX, AVX */
990FNIEMOP_DEF(iemOp_vblendvpd_Vx_Hx_Wx_Lx)
991{
992 //IEMOP_MNEMONIC4(VEX_RVM, VPBLENDVPD, blendvpd, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
993 IEMOPBLENDOP_INIT_VARS(vblendvpd);
994 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
995}
996
997
998/**
999 * Common worker for AVX2 instructions on the forms:
1000 * - vpxxx xmm0, xmm1, xmm2/mem128, xmm4
1001 * - vpxxx ymm0, ymm1, ymm2/mem256, ymm4
1002 *
1003 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
1004 */
1005FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
1006{
1007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1008 if (IEM_IS_MODRM_REG_MODE(bRm))
1009 {
1010 /*
1011 * Register, register.
1012 */
1013 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1014 if (pVCpu->iem.s.uVexLength)
1015 {
1016 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
1017 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
1018 IEM_MC_LOCAL(RTUINT256U, uDst);
1019 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1020 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1021 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1022 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1023 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1024 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1025 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1026 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1027 IEM_MC_PREPARE_AVX_USAGE();
1028 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1029 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1030 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
1031 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1032 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1033 IEM_MC_ADVANCE_RIP_AND_FINISH();
1034 IEM_MC_END();
1035 }
1036 else
1037 {
1038 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
1039 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1040 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1041 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1042 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
1043 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1044 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1045 IEM_MC_PREPARE_AVX_USAGE();
1046 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1047 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1048 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1049 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
1050 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1051 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1052 IEM_MC_ADVANCE_RIP_AND_FINISH();
1053 IEM_MC_END();
1054 }
1055 }
1056 else
1057 {
1058 /*
1059 * Register, memory.
1060 */
1061 if (pVCpu->iem.s.uVexLength)
1062 {
1063 IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER, 0);
1064 IEM_MC_LOCAL(RTUINT256U, uDst);
1065 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1066 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1067 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1068 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1069 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1070 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1071 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1072 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1073
1074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1075 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1076
1077 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
1078 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1079 IEM_MC_PREPARE_AVX_USAGE();
1080
1081 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1082 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1083 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1084 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
1085 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1086 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1087
1088 IEM_MC_ADVANCE_RIP_AND_FINISH();
1089 IEM_MC_END();
1090 }
1091 else
1092 {
1093 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1094 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1095 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1096 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1097 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1098 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1099 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1100
1101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1102 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1103
1104 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1105 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1106 IEM_MC_PREPARE_AVX_USAGE();
1107
1108 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1109 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1110 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1111 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
1112 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1113 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1114
1115 IEM_MC_ADVANCE_RIP_AND_FINISH();
1116 IEM_MC_END();
1117 }
1118 }
1119}
1120
1121
1122/** Opcode VEX.66.0F3A 0x4c (vex only).
1123 * AVX, AVX2 */
1124FNIEMOP_DEF(iemOp_vpblendvb_Vx_Hx_Wx_Lx)
1125{
1126 //IEMOP_MNEMONIC4(VEX_RVM, VPBLENDVB, vpblendvb, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
1127 IEMOPBLENDOP_INIT_VARS(vpblendvb);
1128 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1129}
1130
1131
1132/* Opcode VEX.66.0F3A 0x4d - invalid */
1133/* Opcode VEX.66.0F3A 0x4e - invalid */
1134/* Opcode VEX.66.0F3A 0x4f - invalid */
1135
1136
1137/* Opcode VEX.66.0F3A 0x50 - invalid */
1138/* Opcode VEX.66.0F3A 0x51 - invalid */
1139/* Opcode VEX.66.0F3A 0x52 - invalid */
1140/* Opcode VEX.66.0F3A 0x53 - invalid */
1141/* Opcode VEX.66.0F3A 0x54 - invalid */
1142/* Opcode VEX.66.0F3A 0x55 - invalid */
1143/* Opcode VEX.66.0F3A 0x56 - invalid */
1144/* Opcode VEX.66.0F3A 0x57 - invalid */
1145/* Opcode VEX.66.0F3A 0x58 - invalid */
1146/* Opcode VEX.66.0F3A 0x59 - invalid */
1147/* Opcode VEX.66.0F3A 0x5a - invalid */
1148/* Opcode VEX.66.0F3A 0x5b - invalid */
1149/** Opcode VEX.66.0F3A 0x5c (AMD tables only). */
1150FNIEMOP_STUB(iemOp_vfmaddsubps_Vx_Lx_Wx_Hx);
1151/** Opcode VEX.66.0F3A 0x5d (AMD tables only). */
1152FNIEMOP_STUB(iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx);
1153/** Opcode VEX.66.0F3A 0x5e (AMD tables only). */
1154FNIEMOP_STUB(iemOp_vfmsubaddps_Vx_Lx_Wx_Hx);
1155/** Opcode VEX.66.0F3A 0x5f (AMD tables only). */
1156FNIEMOP_STUB(iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx);
1157
1158
1159/** Opcode VEX.66.0F3A 0x60. */
1160FNIEMOP_STUB(iemOp_vpcmpestrm_Vdq_Wdq_Ib);
1161/** Opcode VEX.66.0F3A 0x61, */
1162FNIEMOP_STUB(iemOp_vpcmpestri_Vdq_Wdq_Ib);
1163/** Opcode VEX.66.0F3A 0x62. */
1164FNIEMOP_STUB(iemOp_vpcmpistrm_Vdq_Wdq_Ib);
1165/** Opcode VEX.66.0F3A 0x63*/
1166FNIEMOP_STUB(iemOp_vpcmpistri_Vdq_Wdq_Ib);
1167/* Opcode VEX.66.0F3A 0x64 - invalid */
1168/* Opcode VEX.66.0F3A 0x65 - invalid */
1169/* Opcode VEX.66.0F3A 0x66 - invalid */
1170/* Opcode VEX.66.0F3A 0x67 - invalid */
1171/** Opcode VEX.66.0F3A 0x68 (AMD tables only). */
1172FNIEMOP_STUB(iemOp_vfmaddps_Vx_Lx_Wx_Hx);
1173/** Opcode VEX.66.0F3A 0x69 (AMD tables only). */
1174FNIEMOP_STUB(iemOp_vfmaddpd_Vx_Lx_Wx_Hx);
1175/** Opcode VEX.66.0F3A 0x6a (AMD tables only). */
1176FNIEMOP_STUB(iemOp_vfmaddss_Vx_Lx_Wx_Hx);
1177/** Opcode VEX.66.0F3A 0x6b (AMD tables only). */
1178FNIEMOP_STUB(iemOp_vfmaddsd_Vx_Lx_Wx_Hx);
1179/** Opcode VEX.66.0F3A 0x6c (AMD tables only). */
1180FNIEMOP_STUB(iemOp_vfmsubps_Vx_Lx_Wx_Hx);
1181/** Opcode VEX.66.0F3A 0x6d (AMD tables only). */
1182FNIEMOP_STUB(iemOp_vfmsubpd_Vx_Lx_Wx_Hx);
1183/** Opcode VEX.66.0F3A 0x6e (AMD tables only). */
1184FNIEMOP_STUB(iemOp_vfmsubss_Vx_Lx_Wx_Hx);
1185/** Opcode VEX.66.0F3A 0x6f (AMD tables only). */
1186FNIEMOP_STUB(iemOp_vfmsubsd_Vx_Lx_Wx_Hx);
1187
1188/* Opcode VEX.66.0F3A 0x70 - invalid */
1189/* Opcode VEX.66.0F3A 0x71 - invalid */
1190/* Opcode VEX.66.0F3A 0x72 - invalid */
1191/* Opcode VEX.66.0F3A 0x73 - invalid */
1192/* Opcode VEX.66.0F3A 0x74 - invalid */
1193/* Opcode VEX.66.0F3A 0x75 - invalid */
1194/* Opcode VEX.66.0F3A 0x76 - invalid */
1195/* Opcode VEX.66.0F3A 0x77 - invalid */
1196/** Opcode VEX.66.0F3A 0x78 (AMD tables only). */
1197FNIEMOP_STUB(iemOp_vfnmaddps_Vx_Lx_Wx_Hx);
1198/** Opcode VEX.66.0F3A 0x79 (AMD tables only). */
1199FNIEMOP_STUB(iemOp_vfnmaddpd_Vx_Lx_Wx_Hx);
1200/** Opcode VEX.66.0F3A 0x7a (AMD tables only). */
1201FNIEMOP_STUB(iemOp_vfnmaddss_Vx_Lx_Wx_Hx);
1202/** Opcode VEX.66.0F3A 0x7b (AMD tables only). */
1203FNIEMOP_STUB(iemOp_vfnmaddsd_Vx_Lx_Wx_Hx);
1204/** Opcode VEX.66.0F3A 0x7c (AMD tables only). */
1205FNIEMOP_STUB(iemOp_vfnmsubps_Vx_Lx_Wx_Hx);
1206/** Opcode VEX.66.0F3A 0x7d (AMD tables only). */
1207FNIEMOP_STUB(iemOp_vfnmsubpd_Vx_Lx_Wx_Hx);
1208/** Opcode VEX.66.0F3A 0x7e (AMD tables only). */
1209FNIEMOP_STUB(iemOp_vfnmsubss_Vx_Lx_Wx_Hx);
1210/** Opcode VEX.66.0F3A 0x7f (AMD tables only). */
1211FNIEMOP_STUB(iemOp_vfnmsubsd_Vx_Lx_Wx_Hx);
1212
1213/* Opcodes 0x0f 0x80 thru 0x0f 0xb0 are unused. */
1214
1215
1216/* Opcode 0x0f 0xc0 - invalid */
1217/* Opcode 0x0f 0xc1 - invalid */
1218/* Opcode 0x0f 0xc2 - invalid */
1219/* Opcode 0x0f 0xc3 - invalid */
1220/* Opcode 0x0f 0xc4 - invalid */
1221/* Opcode 0x0f 0xc5 - invalid */
1222/* Opcode 0x0f 0xc6 - invalid */
1223/* Opcode 0x0f 0xc7 - invalid */
1224/* Opcode 0x0f 0xc8 - invalid */
1225/* Opcode 0x0f 0xc9 - invalid */
1226/* Opcode 0x0f 0xca - invalid */
1227/* Opcode 0x0f 0xcb - invalid */
1228/* Opcode 0x0f 0xcc - invalid */
1229/* Opcode 0x0f 0xcd - invalid */
1230/* Opcode 0x0f 0xce - invalid */
1231/* Opcode 0x0f 0xcf - invalid */
1232
1233
1234/* Opcode VEX.66.0F3A 0xd0 - invalid */
1235/* Opcode VEX.66.0F3A 0xd1 - invalid */
1236/* Opcode VEX.66.0F3A 0xd2 - invalid */
1237/* Opcode VEX.66.0F3A 0xd3 - invalid */
1238/* Opcode VEX.66.0F3A 0xd4 - invalid */
1239/* Opcode VEX.66.0F3A 0xd5 - invalid */
1240/* Opcode VEX.66.0F3A 0xd6 - invalid */
1241/* Opcode VEX.66.0F3A 0xd7 - invalid */
1242/* Opcode VEX.66.0F3A 0xd8 - invalid */
1243/* Opcode VEX.66.0F3A 0xd9 - invalid */
1244/* Opcode VEX.66.0F3A 0xda - invalid */
1245/* Opcode VEX.66.0F3A 0xdb - invalid */
1246/* Opcode VEX.66.0F3A 0xdc - invalid */
1247/* Opcode VEX.66.0F3A 0xdd - invalid */
1248/* Opcode VEX.66.0F3A 0xde - invalid */
1249/* Opcode VEX.66.0F3A 0xdf - (aeskeygenassist). */
1250FNIEMOP_STUB(iemOp_vaeskeygen_Vdq_Wdq_Ib);
1251
1252
1253/**
1254 * @opcode 0xf0
1255 * @oppfx 0xf2
1256 * @opflclass unchanged
1257 */
1258FNIEMOP_DEF(iemOp_rorx_Gy_Ey_Ib)
1259{
1260 IEMOP_MNEMONIC3(VEX_RMI, RORX, rorx, Gy, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO);
1261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1262 if (IEM_IS_MODRM_REG_MODE(bRm))
1263 {
1264 /*
1265 * Register, register.
1266 */
1267 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1268 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1269 {
1270 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
1271 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1272 IEM_MC_ARG(uint64_t *, pDst, 0);
1273 IEM_MC_ARG(uint64_t, uSrc1, 1);
1274 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
1275 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
1276 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1277 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
1278 IEM_MC_ADVANCE_RIP_AND_FINISH();
1279 IEM_MC_END();
1280 }
1281 else
1282 {
1283 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
1284 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1285 IEM_MC_ARG(uint32_t *, pDst, 0);
1286 IEM_MC_ARG(uint32_t, uSrc1, 1);
1287 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
1288 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
1289 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1290 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
1291 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
1292 IEM_MC_ADVANCE_RIP_AND_FINISH();
1293 IEM_MC_END();
1294 }
1295 }
1296 else
1297 {
1298 /*
1299 * Register, memory.
1300 */
1301 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1302 {
1303 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
1304 IEM_MC_ARG(uint64_t *, pDst, 0);
1305 IEM_MC_ARG(uint64_t, uSrc1, 1);
1306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1308 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1309 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
1310 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1311 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1312 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1313 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
1314 IEM_MC_ADVANCE_RIP_AND_FINISH();
1315 IEM_MC_END();
1316 }
1317 else
1318 {
1319 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
1320 IEM_MC_ARG(uint32_t *, pDst, 0);
1321 IEM_MC_ARG(uint32_t, uSrc1, 1);
1322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1324 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1325 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
1326 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1327 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1328 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1329 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
1330 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
1331 IEM_MC_ADVANCE_RIP_AND_FINISH();
1332 IEM_MC_END();
1333 }
1334 }
1335}
1336
1337
1338/**
1339 * VEX opcode map \#3.
1340 *
1341 * @sa g_apfnThreeByte0f3a
1342 */
1343const PFNIEMOP g_apfnVexMap3[] =
1344{
1345 /* no prefix, 066h prefix f3h prefix, f2h prefix */
1346 /* 0x00 */ iemOp_InvalidNeedRMImm8, iemOp_vpermq_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1347 /* 0x01 */ iemOp_InvalidNeedRMImm8, iemOp_vpermqd_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1348 /* 0x02 */ iemOp_InvalidNeedRMImm8, iemOp_vpblendd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1349 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1350 /* 0x04 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1351 /* 0x05 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1352 /* 0x06 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1353 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1354 /* 0x08 */ iemOp_InvalidNeedRMImm8, iemOp_vroundps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1355 /* 0x09 */ iemOp_InvalidNeedRMImm8, iemOp_vroundpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1356 /* 0x0a */ iemOp_InvalidNeedRMImm8, iemOp_vroundss_Vss_Wss_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1357 /* 0x0b */ iemOp_InvalidNeedRMImm8, iemOp_vroundsd_Vsd_Wsd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1358 /* 0x0c */ iemOp_InvalidNeedRMImm8, iemOp_vblendps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1359 /* 0x0d */ iemOp_InvalidNeedRMImm8, iemOp_vblendpd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1360 /* 0x0e */ iemOp_InvalidNeedRMImm8, iemOp_vpblendw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1361 /* 0x0f */ iemOp_InvalidNeedRMImm8, iemOp_vpalignr_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1362
1363 /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1364 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1365 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1366 /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1367 /* 0x14 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrb_RdMb_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1368 /* 0x15 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrw_RdMw_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1369 /* 0x16 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrd_q_RdMw_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1370 /* 0x17 */ iemOp_InvalidNeedRMImm8, iemOp_vextractps_Ed_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1371 /* 0x18 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1372 /* 0x19 */ iemOp_InvalidNeedRMImm8, iemOp_vextractf128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1373 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1374 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1375 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1376 /* 0x1d */ iemOp_InvalidNeedRMImm8, iemOp_vcvtps2ph_Wx_Vx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1377 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1378 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1379
1380 /* 0x20 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1381 /* 0x21 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1382 /* 0x22 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1383 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1384 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1385 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1386 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1387 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1388 /* 0x28 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1389 /* 0x29 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1390 /* 0x2a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1391 /* 0x2b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1392 /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1393 /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1394 /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1395 /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1396
1397 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1398 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1399 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1400 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1401 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1402 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1403 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1404 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1405 /* 0x38 */ iemOp_InvalidNeedRMImm8, iemOp_vinserti128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1406 /* 0x39 */ iemOp_InvalidNeedRMImm8, iemOp_vextracti128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1407 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1408 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1409 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1410 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1411 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1412 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1413
1414 /* 0x40 */ iemOp_InvalidNeedRMImm8, iemOp_vdpps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1415 /* 0x41 */ iemOp_InvalidNeedRMImm8, iemOp_vdppd_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1416 /* 0x42 */ iemOp_InvalidNeedRMImm8, iemOp_vmpsadbw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1417 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1418 /* 0x44 */ iemOp_InvalidNeedRMImm8, iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1419 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1420 /* 0x46 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1421 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1422 /* 0x48 */ iemOp_InvalidNeedRMImm8, iemOp_vperlmilzz2ps_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1423 /* 0x49 */ iemOp_InvalidNeedRMImm8, iemOp_vperlmilzz2pd_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1424 /* 0x4a */ iemOp_InvalidNeedRMImm8, iemOp_vblendvps_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1425 /* 0x4b */ iemOp_InvalidNeedRMImm8, iemOp_vblendvpd_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1426 /* 0x4c */ iemOp_InvalidNeedRMImm8, iemOp_vpblendvb_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1427 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1428 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1429 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1430
1431 /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1432 /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1433 /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1434 /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1435 /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1436 /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1437 /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1438 /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1439 /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1440 /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1441 /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1442 /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1443 /* 0x5c */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1444 /* 0x5d */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1445 /* 0x5e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1446 /* 0x5f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1447
1448 /* 0x60 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1449 /* 0x61 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1450 /* 0x62 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1451 /* 0x63 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1452 /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1453 /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1454 /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1455 /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1456 /* 0x68 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1457 /* 0x69 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1458 /* 0x6a */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1459 /* 0x6b */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1460 /* 0x6c */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1461 /* 0x6d */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1462 /* 0x6e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1463 /* 0x6f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1464
1465 /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1466 /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1467 /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1468 /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1469 /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1470 /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1471 /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1472 /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1473 /* 0x78 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1474 /* 0x79 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1475 /* 0x7a */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1476 /* 0x7b */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1477 /* 0x7c */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1478 /* 0x7d */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1479 /* 0x7e */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1480 /* 0x7f */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1481
1482 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1483 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1484 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1485 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1486 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1487 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1488 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1489 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1490 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1491 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1492 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1493 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1494 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1495 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1496 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1497 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1498
1499 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1500 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1501 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1502 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1503 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1504 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1505 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1506 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1507 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1508 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1509 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1510 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1511 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1512 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1513 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1514 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1515
1516 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1517 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1518 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1519 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1520 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1521 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1522 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1523 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1524 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1525 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1526 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1527 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1528 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1529 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1530 /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1531 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1532
1533 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1534 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1535 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1536 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1537 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1538 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1539 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1540 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1541 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1542 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1543 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1544 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1545 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1546 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1547 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1548 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1549
1550 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1551 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1552 /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1553 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1554 /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1555 /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1556 /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1557 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1558 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1559 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1560 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1561 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1562 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1563 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1564 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1565 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1566
1567 /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1568 /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1569 /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1570 /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1571 /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1572 /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1573 /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1574 /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1575 /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1576 /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1577 /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1578 /* 0xdb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1579 /* 0xdc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1580 /* 0xdd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1581 /* 0xde */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1582 /* 0xdf */ iemOp_vaeskeygen_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1583
1584 /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1585 /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1586 /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1587 /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1588 /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1589 /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1590 /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1591 /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1592 /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1593 /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1594 /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1595 /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1596 /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1597 /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1598 /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1599 /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1600
1601 /* 0xf0 */ iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_rorx_Gy_Ey_Ib,
1602 /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1603 /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1604 /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1605 /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1606 /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1607 /* 0xf6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1608 /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1609 /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1610 /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1611 /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1612 /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1613 /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1614 /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1615 /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1616 /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1617};
1618AssertCompile(RT_ELEMENTS(g_apfnVexMap3) == 1024);
1619
1620/** @} */
1621
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette