VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap3.cpp.h@ 100764

Last change on this file since 100764 was 100740, checked in by vboxsync, 17 months ago

VMM/IEM: Split up IEMAllInstInterpretOnly.cpp into four files to speed up compilation. This requires making all the tables public. It also requires duplicating the common functions in IEMAllInstCommon.cpp.h, but that shouldn't amount to much extra code. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 65.0 KB
1/* $Id: IEMAllInstVexMap3.cpp.h 100740 2023-07-30 20:08:25Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation, 0x0f 0x3a map.
4 *
5 * @remarks IEMAllInstThree0f3a.cpp.h is a legacy (non-VEX) mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 3
33 * @{
34 */
35
36/**
37 * Common worker for AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128, imm8
39 * - vpxxx ymm0, ymm1, ymm2/mem256, imm8
40 *
41 * Takes function table for function w/o implicit state parameter.
42 *
43 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
44 */
45FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
46{
47 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
48 if (IEM_IS_MODRM_REG_MODE(bRm))
49 {
50 /*
51 * Register, register.
52 */
53 if (pVCpu->iem.s.uVexLength)
54 {
55 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
56 IEM_MC_BEGIN(4, 3);
57 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
58 IEM_MC_LOCAL(RTUINT256U, uDst);
59 IEM_MC_LOCAL(RTUINT256U, uSrc1);
60 IEM_MC_LOCAL(RTUINT256U, uSrc2);
61 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
62 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
63 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
64 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
65 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
66 IEM_MC_PREPARE_AVX_USAGE();
67 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
68 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
69 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
70 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
71 IEM_MC_ADVANCE_RIP_AND_FINISH();
72 IEM_MC_END();
73 }
74 else
75 {
76 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
77 IEM_MC_BEGIN(4, 0);
78 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
79 IEM_MC_ARG(PRTUINT128U, puDst, 0);
80 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
81 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
82 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
83 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
84 IEM_MC_PREPARE_AVX_USAGE();
85 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
86 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
87 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
88 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
89 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
90 IEM_MC_ADVANCE_RIP_AND_FINISH();
91 IEM_MC_END();
92 }
93 }
94 else
95 {
96 /*
97 * Register, memory.
98 */
99 if (pVCpu->iem.s.uVexLength)
100 {
101 IEM_MC_BEGIN(4, 4);
102 IEM_MC_LOCAL(RTUINT256U, uDst);
103 IEM_MC_LOCAL(RTUINT256U, uSrc1);
104 IEM_MC_LOCAL(RTUINT256U, uSrc2);
105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
106 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
107 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
108 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
109
110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
111 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
112 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
113 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
114 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
115 IEM_MC_PREPARE_AVX_USAGE();
116
117 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
118 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
119 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
120 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
121
122 IEM_MC_ADVANCE_RIP_AND_FINISH();
123 IEM_MC_END();
124 }
125 else
126 {
127 IEM_MC_BEGIN(4, 2);
128 IEM_MC_LOCAL(RTUINT128U, uSrc2);
129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
130 IEM_MC_ARG(PRTUINT128U, puDst, 0);
131 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
132 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
133
134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
135 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
136 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
137 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
138 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
139 IEM_MC_PREPARE_AVX_USAGE();
140
141 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
142 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
143 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
144 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
145 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
146
147 IEM_MC_ADVANCE_RIP_AND_FINISH();
148 IEM_MC_END();
149 }
150 }
151}
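/*
 * Usage illustration (mirroring the vpblendw handler further down in this file):
 * a concrete opcode function instantiates the host/fallback implementation table
 * and tail-calls the worker above with it, roughly:
 *
 *     IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendw);
 *     return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt,
 *                           IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 */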
152
153
154/**
155 * Common worker for AVX instructions on the forms:
156 * - vblendps/d xmm0, xmm1, xmm2/mem128, imm8
157 * - vblendps/d ymm0, ymm1, ymm2/mem256, imm8
158 *
159 * Takes function table for function w/o implicit state parameter.
160 *
161 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
162 */
163FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
164{
165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
166 if (IEM_IS_MODRM_REG_MODE(bRm))
167 {
168 /*
169 * Register, register.
170 */
171 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
172 if (pVCpu->iem.s.uVexLength)
173 {
174 IEM_MC_BEGIN(4, 3);
175 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
176 IEM_MC_LOCAL(RTUINT256U, uDst);
177 IEM_MC_LOCAL(RTUINT256U, uSrc1);
178 IEM_MC_LOCAL(RTUINT256U, uSrc2);
179 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
180 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
181 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
182 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
183 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
184 IEM_MC_PREPARE_AVX_USAGE();
185 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
186 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
187 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
188 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
189 IEM_MC_ADVANCE_RIP_AND_FINISH();
190 IEM_MC_END();
191 }
192 else
193 {
194 IEM_MC_BEGIN(4, 0);
195 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
196 IEM_MC_ARG(PRTUINT128U, puDst, 0);
197 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
198 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
199 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
200 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
201 IEM_MC_PREPARE_AVX_USAGE();
202 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
203 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
204 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
205 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
206 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
207 IEM_MC_ADVANCE_RIP_AND_FINISH();
208 IEM_MC_END();
209 }
210 }
211 else
212 {
213 /*
214 * Register, memory.
215 */
216 if (pVCpu->iem.s.uVexLength)
217 {
218 IEM_MC_BEGIN(4, 4);
219 IEM_MC_LOCAL(RTUINT256U, uDst);
220 IEM_MC_LOCAL(RTUINT256U, uSrc1);
221 IEM_MC_LOCAL(RTUINT256U, uSrc2);
222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
223 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
224 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
225 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
226
227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
228 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
229 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
230 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
231 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
232 IEM_MC_PREPARE_AVX_USAGE();
233
234 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
235 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
236 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
237 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
238
239 IEM_MC_ADVANCE_RIP_AND_FINISH();
240 IEM_MC_END();
241 }
242 else
243 {
244 IEM_MC_BEGIN(4, 2);
245 IEM_MC_LOCAL(RTUINT128U, uSrc2);
246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
247 IEM_MC_ARG(PRTUINT128U, puDst, 0);
248 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
249 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
250
251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
252 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
253 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
254 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
255 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
256 IEM_MC_PREPARE_AVX_USAGE();
257
258 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
259 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
260 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
261 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
262 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
263
264 IEM_MC_ADVANCE_RIP_AND_FINISH();
265 IEM_MC_END();
266 }
267 }
268}
269
270
271/** Opcode VEX.66.0F3A 0x00. */
272FNIEMOP_STUB(iemOp_vpermq_Vqq_Wqq_Ib);
273/** Opcode VEX.66.0F3A 0x01. */
274FNIEMOP_STUB(iemOp_vpermqd_Vqq_Wqq_Ib);
275/** Opcode VEX.66.0F3A 0x02. */
276FNIEMOP_STUB(iemOp_vpblendd_Vx_Wx_Ib);
277/* Opcode VEX.66.0F3A 0x03 - invalid */
278/** Opcode VEX.66.0F3A 0x04. */
279FNIEMOP_STUB(iemOp_vpermilps_Vx_Wx_Ib);
280/** Opcode VEX.66.0F3A 0x05. */
281FNIEMOP_STUB(iemOp_vpermilpd_Vx_Wx_Ib);
282
283
284/** Opcode VEX.66.0F3A 0x06 (vex only) */
285FNIEMOP_DEF(iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib)
286{
287 //IEMOP_MNEMONIC4(VEX_RVM, VPERM2F128, vperm2f128, Vqq, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0); /** @todo */
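    /* Note (per the VPERM2F128 definition, not encoded here): imm8 bits 1:0 select
       which 128-bit half of src1/src2 becomes the low lane of the result, bits 5:4
       do the same for the high lane, and bits 3 and 7 zero the respective lane
       instead; that selection is implemented by the AIMPL worker called below. */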
288
289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
290 if (IEM_IS_MODRM_REG_MODE(bRm))
291 {
292 /*
293 * Register, register.
294 */
295 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
296 IEM_MC_BEGIN(4, 3);
297 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
298 IEM_MC_LOCAL(RTUINT256U, uDst);
299 IEM_MC_LOCAL(RTUINT256U, uSrc1);
300 IEM_MC_LOCAL(RTUINT256U, uSrc2);
301 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
302 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
303 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
304 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
305 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
306 IEM_MC_PREPARE_AVX_USAGE();
307 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
308 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
309 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
310 puDst, puSrc1, puSrc2, bImmArg);
311 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
312 IEM_MC_ADVANCE_RIP_AND_FINISH();
313 IEM_MC_END();
314 }
315 else
316 {
317 /*
318 * Register, memory.
319 */
320 IEM_MC_BEGIN(4, 2);
321 IEM_MC_LOCAL(RTUINT256U, uDst);
322 IEM_MC_LOCAL(RTUINT256U, uSrc1);
323 IEM_MC_LOCAL(RTUINT256U, uSrc2);
324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
325 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
326 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
327 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
328
329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
330 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
331 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
332 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
333 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
334 IEM_MC_PREPARE_AVX_USAGE();
335
336 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
337 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
338 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
339 puDst, puSrc1, puSrc2, bImmArg);
340 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
341
342 IEM_MC_ADVANCE_RIP_AND_FINISH();
343 IEM_MC_END();
344 }
345}
346
347
348/* Opcode VEX.66.0F3A 0x07 - invalid */
349/** Opcode VEX.66.0F3A 0x08. */
350FNIEMOP_STUB(iemOp_vroundps_Vx_Wx_Ib);
351/** Opcode VEX.66.0F3A 0x09. */
352FNIEMOP_STUB(iemOp_vroundpd_Vx_Wx_Ib);
353/** Opcode VEX.66.0F3A 0x0a. */
354FNIEMOP_STUB(iemOp_vroundss_Vss_Wss_Ib);
355/** Opcode VEX.66.0F3A 0x0b. */
356FNIEMOP_STUB(iemOp_vroundsd_Vsd_Wsd_Ib);
357
358
359/** Opcode VEX.66.0F3A 0x0c.
360 * AVX,AVX */
361FNIEMOP_DEF(iemOp_vblendps_Vx_Hx_Wx_Ib)
362{
363 IEMOP_MNEMONIC3(VEX_RVM, VBLENDPS, vblendps, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
364 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendps);
365 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
366}
367
368
369/** Opcode VEX.66.0F3A 0x0d.
370 * AVX,AVX */
371FNIEMOP_DEF(iemOp_vblendpd_Vx_Hx_Wx_Ib)
372{
373 IEMOP_MNEMONIC3(VEX_RVM, VBLENDPD, vblendpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
374 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendpd);
375 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
376}
377
378
379/** Opcode VEX.66.0F3A 0x0e.
380 * AVX,AVX2 */
381FNIEMOP_DEF(iemOp_vpblendw_Vx_Hx_Wx_Ib)
382{
383 IEMOP_MNEMONIC3(VEX_RVM, VPBLENDW, vpblendw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
384 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendw);
385 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
386}
387
388
389/** Opcode VEX.0F3A 0x0f - invalid. */
390
391
392/** Opcode VEX.66.0F3A 0x0f.
393 * AVX,AVX2 */
394FNIEMOP_DEF(iemOp_vpalignr_Vx_Hx_Wx_Ib)
395{
396 IEMOP_MNEMONIC3(VEX_RVM, VPALIGNR, vpalignr, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
397 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpalignr);
398 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
399}
400
401
402/* Opcode VEX.66.0F3A 0x10 - invalid */
403/* Opcode VEX.66.0F3A 0x11 - invalid */
404/* Opcode VEX.66.0F3A 0x12 - invalid */
405/* Opcode VEX.66.0F3A 0x13 - invalid */
406/** Opcode VEX.66.0F3A 0x14. */
407FNIEMOP_STUB(iemOp_vpextrb_RdMb_Vdq_Ib);
408/** Opcode VEX.66.0F3A 0x15. */
409FNIEMOP_STUB(iemOp_vpextrw_RdMw_Vdq_Ib);
410/** Opcode VEX.66.0F3A 0x16. */
411FNIEMOP_STUB(iemOp_vpextrd_q_RdMw_Vdq_Ib);
412/** Opcode VEX.66.0F3A 0x17. */
413FNIEMOP_STUB(iemOp_vextractps_Ed_Vdq_Ib);
414
415
416/** Opcode VEX.66.0F3A 0x18 (vex only). */
417FNIEMOP_DEF(iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib)
418{
419 //IEMOP_MNEMONIC4(VEX_RMI, VINSERTF128, vinsertf128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
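    /* How this works (see the IEM_MC blocks below): the full 256-bit VVVV source is
       copied into the destination first, then the 128-bit register/memory operand
       overwrites the lane selected by imm8 bit 0 (0 = low, 1 = high). */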
420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
421 if (IEM_IS_MODRM_REG_MODE(bRm))
422 {
423 /*
424 * Register, register.
425 */
426 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
427 IEM_MC_BEGIN(0, 1);
428 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
429 IEM_MC_LOCAL(RTUINT128U, uSrc);
430
431 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
432 IEM_MC_PREPARE_AVX_USAGE();
433
434 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
435 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
436 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
437
438 IEM_MC_ADVANCE_RIP_AND_FINISH();
439 IEM_MC_END();
440 }
441 else
442 {
443 /*
444 * Register, memory.
445 */
446 IEM_MC_BEGIN(0, 2);
447 IEM_MC_LOCAL(RTUINT128U, uSrc);
448 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
449
450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
451 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
452 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
453 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
454 IEM_MC_PREPARE_AVX_USAGE();
455
456 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
457 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
458 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
459
460 IEM_MC_ADVANCE_RIP_AND_FINISH();
461 IEM_MC_END();
462 }
463}
464
465
466/** Opcode VEX.66.0F3A 0x19 (vex only). */
467FNIEMOP_STUB(iemOp_vextractf128_Wdq_Vqq_Ib);
468/* Opcode VEX.66.0F3A 0x1a - invalid */
469/* Opcode VEX.66.0F3A 0x1b - invalid */
470/* Opcode VEX.66.0F3A 0x1c - invalid */
471/** Opcode VEX.66.0F3A 0x1d (vex only). */
472FNIEMOP_STUB(iemOp_vcvtps2ph_Wx_Vx_Ib);
473/* Opcode VEX.66.0F3A 0x1e - invalid */
474/* Opcode VEX.66.0F3A 0x1f - invalid */
475
476
477/** Opcode VEX.66.0F3A 0x20. */
478FNIEMOP_STUB(iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib);
479/** Opcode VEX.66.0F3A 0x21. */
480FNIEMOP_STUB(iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib);
481/** Opcode VEX.66.0F3A 0x22. */
482FNIEMOP_STUB(iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib);
483/* Opcode VEX.66.0F3A 0x23 - invalid */
484/* Opcode VEX.66.0F3A 0x24 - invalid */
485/* Opcode VEX.66.0F3A 0x25 - invalid */
486/* Opcode VEX.66.0F3A 0x26 - invalid */
487/* Opcode VEX.66.0F3A 0x27 - invalid */
488/* Opcode VEX.66.0F3A 0x28 - invalid */
489/* Opcode VEX.66.0F3A 0x29 - invalid */
490/* Opcode VEX.66.0F3A 0x2a - invalid */
491/* Opcode VEX.66.0F3A 0x2b - invalid */
492/* Opcode VEX.66.0F3A 0x2c - invalid */
493/* Opcode VEX.66.0F3A 0x2d - invalid */
494/* Opcode VEX.66.0F3A 0x2e - invalid */
495/* Opcode VEX.66.0F3A 0x2f - invalid */
496
497
498/* Opcode VEX.66.0F3A 0x30 - invalid */
499/* Opcode VEX.66.0F3A 0x31 - invalid */
500/* Opcode VEX.66.0F3A 0x32 - invalid */
501/* Opcode VEX.66.0F3A 0x33 - invalid */
502/* Opcode VEX.66.0F3A 0x34 - invalid */
503/* Opcode VEX.66.0F3A 0x35 - invalid */
504/* Opcode VEX.66.0F3A 0x36 - invalid */
505/* Opcode VEX.66.0F3A 0x37 - invalid */
506
507
508/** Opcode VEX.66.0F3A 0x38 (vex only). */
509FNIEMOP_DEF(iemOp_vinserti128_Vqq_Hqq_Wqq_Ib)
510{
511 //IEMOP_MNEMONIC4(VEX_RMI, VINSERTI128, vinserti128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
512 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
513 if (IEM_IS_MODRM_REG_MODE(bRm))
514 {
515 /*
516 * Register, register.
517 */
518 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
519 IEM_MC_BEGIN(0, 1);
520 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
521 IEM_MC_LOCAL(RTUINT128U, uSrc);
522
523 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
524 IEM_MC_PREPARE_AVX_USAGE();
525
526 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
527 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
528 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
529
530 IEM_MC_ADVANCE_RIP_AND_FINISH();
531 IEM_MC_END();
532 }
533 else
534 {
535 /*
536 * Register, memory.
537 */
538 IEM_MC_BEGIN(0, 2);
539 IEM_MC_LOCAL(RTUINT128U, uSrc);
540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
541
542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
543 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
544 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
545 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
546 IEM_MC_PREPARE_AVX_USAGE();
547
548 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
549 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
550 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
551
552 IEM_MC_ADVANCE_RIP_AND_FINISH();
553 IEM_MC_END();
554 }
555}
556
557
558/** Opcode VEX.66.0F3A 0x39 (vex only). */
559FNIEMOP_STUB(iemOp_vextracti128_Wdq_Vqq_Ib);
560/* Opcode VEX.66.0F3A 0x3a - invalid */
561/* Opcode VEX.66.0F3A 0x3b - invalid */
562/* Opcode VEX.66.0F3A 0x3c - invalid */
563/* Opcode VEX.66.0F3A 0x3d - invalid */
564/* Opcode VEX.66.0F3A 0x3e - invalid */
565/* Opcode VEX.66.0F3A 0x3f - invalid */
566
567
568/** Opcode VEX.66.0F3A 0x40. */
569FNIEMOP_STUB(iemOp_vdpps_Vx_Hx_Wx_Ib);
570/** Opcode VEX.66.0F3A 0x41. */
571FNIEMOP_STUB(iemOp_vdppd_Vdq_Hdq_Wdq_Ib);
572/** Opcode VEX.66.0F3A 0x42. */
573FNIEMOP_STUB(iemOp_vmpsadbw_Vx_Hx_Wx_Ib);
574/* Opcode VEX.66.0F3A 0x43 - invalid */
575
576
577/** Opcode VEX.66.0F3A 0x44. */
578FNIEMOP_DEF(iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib)
579{
580 //IEMOP_MNEMONIC3(VEX_RVM, VPCLMULQDQ, vpclmulqdq, Vdq, Hdq, Wdq, DISOPTYPE_HARMLESS, 0); /* @todo */
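    /* Note (from the PCLMULQDQ definition, not this file): the immediate picks the
       quadwords to multiply - imm8 bit 0 selects the low/high qword of the first
       source, bit 4 that of the second - and the carry-less product fills all 128
       bits of the destination.  The selection itself happens in the AIMPL worker. */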
581
582 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
583 if (IEM_IS_MODRM_REG_MODE(bRm))
584 {
585 /*
586 * Register, register.
587 */
588 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
589 IEM_MC_BEGIN(4, 0);
590 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
591 IEM_MC_ARG(PRTUINT128U, puDst, 0);
592 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
593 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
594 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
595 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
596 IEM_MC_PREPARE_AVX_USAGE();
597 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
598 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
599 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
600 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
601 puDst, puSrc1, puSrc2, bImmArg);
602 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
603 IEM_MC_ADVANCE_RIP_AND_FINISH();
604 IEM_MC_END();
605 }
606 else
607 {
608 /*
609 * Register, memory.
610 */
611 IEM_MC_BEGIN(4, 2);
612 IEM_MC_LOCAL(RTUINT128U, uSrc2);
613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
614 IEM_MC_ARG(PRTUINT128U, puDst, 0);
615 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
616 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
617
618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
619 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
620 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
621 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
623 IEM_MC_PREPARE_AVX_USAGE();
624
625 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
626 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
627 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
628 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
629 puDst, puSrc1, puSrc2, bImmArg);
630 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
631
632 IEM_MC_ADVANCE_RIP_AND_FINISH();
633 IEM_MC_END();
634 }
635}
636
637
638/* Opcode VEX.66.0F3A 0x45 - invalid */
639
640
641/** Opcode VEX.66.0F3A 0x46 (vex only) */
642FNIEMOP_DEF(iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib)
643{
644 //IEMOP_MNEMONIC4(VEX_RVM, VPERM2I128, vperm2i128, Vqq, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0); /** @todo */
645
646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
647 if (IEM_IS_MODRM_REG_MODE(bRm))
648 {
649 /*
650 * Register, register.
651 */
652 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
653 IEM_MC_BEGIN(4, 3);
654 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
655 IEM_MC_LOCAL(RTUINT256U, uDst);
656 IEM_MC_LOCAL(RTUINT256U, uSrc1);
657 IEM_MC_LOCAL(RTUINT256U, uSrc2);
658 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
659 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
660 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
661 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
662 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
663 IEM_MC_PREPARE_AVX_USAGE();
664 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
665 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
666 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
667 puDst, puSrc1, puSrc2, bImmArg);
668 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
669 IEM_MC_ADVANCE_RIP_AND_FINISH();
670 IEM_MC_END();
671 }
672 else
673 {
674 /*
675 * Register, memory.
676 */
677 IEM_MC_BEGIN(4, 2);
678 IEM_MC_LOCAL(RTUINT256U, uDst);
679 IEM_MC_LOCAL(RTUINT256U, uSrc1);
680 IEM_MC_LOCAL(RTUINT256U, uSrc2);
681 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
682 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
683 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
684 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
685
686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
687 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
688 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
689 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
690 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
691 IEM_MC_PREPARE_AVX_USAGE();
692
693 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
694 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
695 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
696 puDst, puSrc1, puSrc2, bImmArg);
697 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
698
699 IEM_MC_ADVANCE_RIP_AND_FINISH();
700 IEM_MC_END();
701 }
702}
703
704
705/* Opcode VEX.66.0F3A 0x47 - invalid */
706/** Opcode VEX.66.0F3A 0x48 (AMD tables only). */
707FNIEMOP_STUB(iemOp_vperlmilzz2ps_Vx_Hx_Wp_Lx);
708/** Opcode VEX.66.0F3A 0x49 (AMD tables only). */
709FNIEMOP_STUB(iemOp_vperlmilzz2pd_Vx_Hx_Wp_Lx);
710
711
712/**
713 * Common worker for AVX instructions on the forms:
714 * - vblendvps/d xmm0, xmm1, xmm2/mem128, xmm4
715 * - vblendvps/d ymm0, ymm1, ymm2/mem256, ymm4
716 *
717 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operations.
718 */
719FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
720{
721 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
722 if (IEM_IS_MODRM_REG_MODE(bRm))
723 {
724 /*
725 * Register, register.
726 */
727 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
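        /* bOp4 is the trailing /is4 byte: its high nibble selects the fourth
           (register) operand, as the "bOp4 >> 4" fetches below show; the low
           nibble is not used by these instructions. */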
728 if (pVCpu->iem.s.uVexLength)
729 {
730 IEM_MC_BEGIN(4, 4);
731 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
732 IEM_MC_LOCAL(RTUINT256U, uDst);
733 IEM_MC_LOCAL(RTUINT256U, uSrc1);
734 IEM_MC_LOCAL(RTUINT256U, uSrc2);
735 IEM_MC_LOCAL(RTUINT256U, uSrc3);
736 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
737 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
738 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
739 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
740 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
741 IEM_MC_PREPARE_AVX_USAGE();
742 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
743 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
744 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
745 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
746 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
747 IEM_MC_ADVANCE_RIP_AND_FINISH();
748 IEM_MC_END();
749 }
750 else
751 {
752 IEM_MC_BEGIN(4, 0);
753 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
754 IEM_MC_ARG(PRTUINT128U, puDst, 0);
755 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
756 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
757 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
758 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
759 IEM_MC_PREPARE_AVX_USAGE();
760 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
761 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
762 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
763 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
764 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
765 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
766 IEM_MC_ADVANCE_RIP_AND_FINISH();
767 IEM_MC_END();
768 }
769 }
770 else
771 {
772 /*
773 * Register, memory.
774 */
775 if (pVCpu->iem.s.uVexLength)
776 {
777 IEM_MC_BEGIN(4, 5);
778 IEM_MC_LOCAL(RTUINT256U, uDst);
779 IEM_MC_LOCAL(RTUINT256U, uSrc1);
780 IEM_MC_LOCAL(RTUINT256U, uSrc2);
781 IEM_MC_LOCAL(RTUINT256U, uSrc3);
782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
783 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
784 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
785 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
786 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
787
788 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
789 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
790
791 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
792 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
793 IEM_MC_PREPARE_AVX_USAGE();
794
795 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
796 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
798 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
799 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
800 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
801
802 IEM_MC_ADVANCE_RIP_AND_FINISH();
803 IEM_MC_END();
804 }
805 else
806 {
807 IEM_MC_BEGIN(4, 2);
808 IEM_MC_LOCAL(RTUINT128U, uSrc2);
809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
810 IEM_MC_ARG(PRTUINT128U, puDst, 0);
811 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
812 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
813 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
814
815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
816 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
817
818 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
819 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
820 IEM_MC_PREPARE_AVX_USAGE();
821
822 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
823 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
824 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
825 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
826 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
827 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
828
829 IEM_MC_ADVANCE_RIP_AND_FINISH();
830 IEM_MC_END();
831 }
832 }
833}
834
835
836/** Opcode VEX.66.0F3A 0x4a (vex only).
837 * AVX, AVX */
838FNIEMOP_DEF(iemOp_vblendvps_Vx_Hx_Wx_Lx)
839{
840 //IEMOP_MNEMONIC4(VEX_RVM, VBLENDVPS, vblendvps, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
841 IEMOPBLENDOP_INIT_VARS(vblendvps);
842 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
843}
844
845
846/** Opcode VEX.66.0F3A 0x4b (vex only).
847 * AVX, AVX */
848FNIEMOP_DEF(iemOp_vblendvpd_Vx_Hx_Wx_Lx)
849{
850 //IEMOP_MNEMONIC4(VEX_RVM, VBLENDVPD, vblendvpd, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
851 IEMOPBLENDOP_INIT_VARS(vblendvpd);
852 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
853}
854
855
856/**
857 * Common worker for AVX2 instructions on the forms:
858 * - vpxxx xmm0, xmm1, xmm2/mem128, xmm4
859 * - vpxxx ymm0, ymm1, ymm2/mem256, ymm4
860 *
861 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
862 */
863FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
864{
865 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
866 if (IEM_IS_MODRM_REG_MODE(bRm))
867 {
868 /*
869 * Register, register.
870 */
871 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
872 if (pVCpu->iem.s.uVexLength)
873 {
874 IEM_MC_BEGIN(4, 4);
875 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
876 IEM_MC_LOCAL(RTUINT256U, uDst);
877 IEM_MC_LOCAL(RTUINT256U, uSrc1);
878 IEM_MC_LOCAL(RTUINT256U, uSrc2);
879 IEM_MC_LOCAL(RTUINT256U, uSrc3);
880 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
881 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
882 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
883 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
884 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
885 IEM_MC_PREPARE_AVX_USAGE();
886 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
887 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
888 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
889 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
890 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
891 IEM_MC_ADVANCE_RIP_AND_FINISH();
892 IEM_MC_END();
893 }
894 else
895 {
896 IEM_MC_BEGIN(4, 0);
897 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
898 IEM_MC_ARG(PRTUINT128U, puDst, 0);
899 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
900 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
901 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
902 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
903 IEM_MC_PREPARE_AVX_USAGE();
904 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
905 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
906 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
907 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
908 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
909 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
910 IEM_MC_ADVANCE_RIP_AND_FINISH();
911 IEM_MC_END();
912 }
913 }
914 else
915 {
916 /*
917 * Register, memory.
918 */
919 if (pVCpu->iem.s.uVexLength)
920 {
921 IEM_MC_BEGIN(4, 5);
922 IEM_MC_LOCAL(RTUINT256U, uDst);
923 IEM_MC_LOCAL(RTUINT256U, uSrc1);
924 IEM_MC_LOCAL(RTUINT256U, uSrc2);
925 IEM_MC_LOCAL(RTUINT256U, uSrc3);
926 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
927 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
928 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
929 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
930 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
931
932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
933 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
934
935 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
936 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
937 IEM_MC_PREPARE_AVX_USAGE();
938
939 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
940 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
942 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
943 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
944 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
945
946 IEM_MC_ADVANCE_RIP_AND_FINISH();
947 IEM_MC_END();
948 }
949 else
950 {
951 IEM_MC_BEGIN(4, 2);
952 IEM_MC_LOCAL(RTUINT128U, uSrc2);
953 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
954 IEM_MC_ARG(PRTUINT128U, puDst, 0);
955 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
956 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
957 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
958
959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
960 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
961
962 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
963 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
964 IEM_MC_PREPARE_AVX_USAGE();
965
966 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
967 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
968 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
969 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
970 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
971 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
972
973 IEM_MC_ADVANCE_RIP_AND_FINISH();
974 IEM_MC_END();
975 }
976 }
977}
978
979
980/** Opcode VEX.66.0F3A 0x4c (vex only).
981 * AVX, AVX2 */
982FNIEMOP_DEF(iemOp_vpblendvb_Vx_Hx_Wx_Lx)
983{
984 //IEMOP_MNEMONIC4(VEX_RVM, VPBLENDVB, vpblendvb, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
985 IEMOPBLENDOP_INIT_VARS(vpblendvb);
986 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
987}
988
989
990/* Opcode VEX.66.0F3A 0x4d - invalid */
991/* Opcode VEX.66.0F3A 0x4e - invalid */
992/* Opcode VEX.66.0F3A 0x4f - invalid */
993
994
995/* Opcode VEX.66.0F3A 0x50 - invalid */
996/* Opcode VEX.66.0F3A 0x51 - invalid */
997/* Opcode VEX.66.0F3A 0x52 - invalid */
998/* Opcode VEX.66.0F3A 0x53 - invalid */
999/* Opcode VEX.66.0F3A 0x54 - invalid */
1000/* Opcode VEX.66.0F3A 0x55 - invalid */
1001/* Opcode VEX.66.0F3A 0x56 - invalid */
1002/* Opcode VEX.66.0F3A 0x57 - invalid */
1003/* Opcode VEX.66.0F3A 0x58 - invalid */
1004/* Opcode VEX.66.0F3A 0x59 - invalid */
1005/* Opcode VEX.66.0F3A 0x5a - invalid */
1006/* Opcode VEX.66.0F3A 0x5b - invalid */
1007/** Opcode VEX.66.0F3A 0x5c (AMD tables only). */
1008FNIEMOP_STUB(iemOp_vfmaddsubps_Vx_Lx_Wx_Hx);
1009/** Opcode VEX.66.0F3A 0x5d (AMD tables only). */
1010FNIEMOP_STUB(iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx);
1011/** Opcode VEX.66.0F3A 0x5e (AMD tables only). */
1012FNIEMOP_STUB(iemOp_vfmsubaddps_Vx_Lx_Wx_Hx);
1013/** Opcode VEX.66.0F3A 0x5f (AMD tables only). */
1014FNIEMOP_STUB(iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx);
1015
1016
1017/** Opcode VEX.66.0F3A 0x60. */
1018FNIEMOP_STUB(iemOp_vpcmpestrm_Vdq_Wdq_Ib);
1019/** Opcode VEX.66.0F3A 0x61. */
1020FNIEMOP_STUB(iemOp_vpcmpestri_Vdq_Wdq_Ib);
1021/** Opcode VEX.66.0F3A 0x62. */
1022FNIEMOP_STUB(iemOp_vpcmpistrm_Vdq_Wdq_Ib);
1023/** Opcode VEX.66.0F3A 0x63. */
1024FNIEMOP_STUB(iemOp_vpcmpistri_Vdq_Wdq_Ib);
1025/* Opcode VEX.66.0F3A 0x64 - invalid */
1026/* Opcode VEX.66.0F3A 0x65 - invalid */
1027/* Opcode VEX.66.0F3A 0x66 - invalid */
1028/* Opcode VEX.66.0F3A 0x67 - invalid */
1029/** Opcode VEX.66.0F3A 0x68 (AMD tables only). */
1030FNIEMOP_STUB(iemOp_vfmaddps_Vx_Lx_Wx_Hx);
1031/** Opcode VEX.66.0F3A 0x69 (AMD tables only). */
1032FNIEMOP_STUB(iemOp_vfmaddpd_Vx_Lx_Wx_Hx);
1033/** Opcode VEX.66.0F3A 0x6a (AMD tables only). */
1034FNIEMOP_STUB(iemOp_vfmaddss_Vx_Lx_Wx_Hx);
1035/** Opcode VEX.66.0F3A 0x6b (AMD tables only). */
1036FNIEMOP_STUB(iemOp_vfmaddsd_Vx_Lx_Wx_Hx);
1037/** Opcode VEX.66.0F3A 0x6c (AMD tables only). */
1038FNIEMOP_STUB(iemOp_vfmsubps_Vx_Lx_Wx_Hx);
1039/** Opcode VEX.66.0F3A 0x6d (AMD tables only). */
1040FNIEMOP_STUB(iemOp_vfmsubpd_Vx_Lx_Wx_Hx);
1041/** Opcode VEX.66.0F3A 0x6e (AMD tables only). */
1042FNIEMOP_STUB(iemOp_vfmsubss_Vx_Lx_Wx_Hx);
1043/** Opcode VEX.66.0F3A 0x6f (AMD tables only). */
1044FNIEMOP_STUB(iemOp_vfmsubsd_Vx_Lx_Wx_Hx);
1045
1046/* Opcode VEX.66.0F3A 0x70 - invalid */
1047/* Opcode VEX.66.0F3A 0x71 - invalid */
1048/* Opcode VEX.66.0F3A 0x72 - invalid */
1049/* Opcode VEX.66.0F3A 0x73 - invalid */
1050/* Opcode VEX.66.0F3A 0x74 - invalid */
1051/* Opcode VEX.66.0F3A 0x75 - invalid */
1052/* Opcode VEX.66.0F3A 0x76 - invalid */
1053/* Opcode VEX.66.0F3A 0x77 - invalid */
1054/** Opcode VEX.66.0F3A 0x78 (AMD tables only). */
1055FNIEMOP_STUB(iemOp_vfnmaddps_Vx_Lx_Wx_Hx);
1056/** Opcode VEX.66.0F3A 0x79 (AMD tables only). */
1057FNIEMOP_STUB(iemOp_vfnmaddpd_Vx_Lx_Wx_Hx);
1058/** Opcode VEX.66.0F3A 0x7a (AMD tables only). */
1059FNIEMOP_STUB(iemOp_vfnmaddss_Vx_Lx_Wx_Hx);
1060/** Opcode VEX.66.0F3A 0x7b (AMD tables only). */
1061FNIEMOP_STUB(iemOp_vfnmaddsd_Vx_Lx_Wx_Hx);
1062/** Opcode VEX.66.0F3A 0x7c (AMD tables only). */
1063FNIEMOP_STUB(iemOp_vfnmsubps_Vx_Lx_Wx_Hx);
1064/** Opcode VEX.66.0F3A 0x7d (AMD tables only). */
1065FNIEMOP_STUB(iemOp_vfnmsubpd_Vx_Lx_Wx_Hx);
1066/** Opcode VEX.66.0F3A 0x7e (AMD tables only). */
1067FNIEMOP_STUB(iemOp_vfnmsubss_Vx_Lx_Wx_Hx);
1068/** Opcode VEX.66.0F3A 0x7f (AMD tables only). */
1069FNIEMOP_STUB(iemOp_vfnmsubsd_Vx_Lx_Wx_Hx);
1070
1071/* Opcodes VEX.66.0F3A 0x80 thru 0xbf are unused. */
1072
1073
1074/* Opcode VEX.66.0F3A 0xc0 - invalid */
1075/* Opcode VEX.66.0F3A 0xc1 - invalid */
1076/* Opcode VEX.66.0F3A 0xc2 - invalid */
1077/* Opcode VEX.66.0F3A 0xc3 - invalid */
1078/* Opcode VEX.66.0F3A 0xc4 - invalid */
1079/* Opcode VEX.66.0F3A 0xc5 - invalid */
1080/* Opcode VEX.66.0F3A 0xc6 - invalid */
1081/* Opcode VEX.66.0F3A 0xc7 - invalid */
1082/* Opcode VEX.66.0F3A 0xc8 - invalid */
1083/* Opcode VEX.66.0F3A 0xc9 - invalid */
1084/* Opcode VEX.66.0F3A 0xca - invalid */
1085/* Opcode VEX.66.0F3A 0xcb - invalid */
1086/* Opcode VEX.66.0F3A 0xcc - invalid */
1087/* Opcode VEX.66.0F3A 0xcd - invalid */
1088/* Opcode VEX.66.0F3A 0xce - invalid */
1089/* Opcode VEX.66.0F3A 0xcf - invalid */
1090
1091
1092/* Opcode VEX.66.0F3A 0xd0 - invalid */
1093/* Opcode VEX.66.0F3A 0xd1 - invalid */
1094/* Opcode VEX.66.0F3A 0xd2 - invalid */
1095/* Opcode VEX.66.0F3A 0xd3 - invalid */
1096/* Opcode VEX.66.0F3A 0xd4 - invalid */
1097/* Opcode VEX.66.0F3A 0xd5 - invalid */
1098/* Opcode VEX.66.0F3A 0xd6 - invalid */
1099/* Opcode VEX.66.0F3A 0xd7 - invalid */
1100/* Opcode VEX.66.0F3A 0xd8 - invalid */
1101/* Opcode VEX.66.0F3A 0xd9 - invalid */
1102/* Opcode VEX.66.0F3A 0xda - invalid */
1103/* Opcode VEX.66.0F3A 0xdb - invalid */
1104/* Opcode VEX.66.0F3A 0xdc - invalid */
1105/* Opcode VEX.66.0F3A 0xdd - invalid */
1106/* Opcode VEX.66.0F3A 0xde - invalid */
1107/** Opcode VEX.66.0F3A 0xdf (vaeskeygenassist). */
1108FNIEMOP_STUB(iemOp_vaeskeygen_Vdq_Wdq_Ib);
1109
1110
1111/** Opcode VEX.F2.0F3A 0xf0 (vex only). */
1112FNIEMOP_DEF(iemOp_rorx_Gy_Ey_Ib)
1113{
1114 IEMOP_MNEMONIC3(VEX_RMI, RORX, rorx, Gy, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO);
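    /* RORX (BMI2) rotates the source right by the immediate and writes the result
       to the destination without touching RFLAGS, which is why there is no EFLAGS
       update below; the hints above reflect that VEX.L and VEX.vvvv must be zero. */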
1115 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1116 if (IEM_IS_MODRM_REG_MODE(bRm))
1117 {
1118 /*
1119 * Register, register.
1120 */
1121 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1122 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1123 {
1124 IEM_MC_BEGIN(3, 0);
1125 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1126 IEM_MC_ARG(uint64_t *, pDst, 0);
1127 IEM_MC_ARG(uint64_t, uSrc1, 1);
1128 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
1129 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1130 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
1131 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
1132 IEM_MC_ADVANCE_RIP_AND_FINISH();
1133 IEM_MC_END();
1134 }
1135 else
1136 {
1137 IEM_MC_BEGIN(3, 0);
1138 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1139 IEM_MC_ARG(uint32_t *, pDst, 0);
1140 IEM_MC_ARG(uint32_t, uSrc1, 1);
1141 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
1142 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1143 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
1144 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
1145 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst);
1146 IEM_MC_ADVANCE_RIP_AND_FINISH();
1147 IEM_MC_END();
1148 }
1149 }
1150 else
1151 {
1152 /*
1153 * Register, memory.
1154 */
1155 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1156 {
1157 IEM_MC_BEGIN(3, 1);
1158 IEM_MC_ARG(uint64_t *, pDst, 0);
1159 IEM_MC_ARG(uint64_t, uSrc1, 1);
1160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1162 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1163 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
1164 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1165 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1166 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1167 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
1168 IEM_MC_ADVANCE_RIP_AND_FINISH();
1169 IEM_MC_END();
1170 }
1171 else
1172 {
1173 IEM_MC_BEGIN(3, 1);
1174 IEM_MC_ARG(uint32_t *, pDst, 0);
1175 IEM_MC_ARG(uint32_t, uSrc1, 1);
1176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1178 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1179 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
1180 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1181 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1182 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1183 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
1184 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst);
1185 IEM_MC_ADVANCE_RIP_AND_FINISH();
1186 IEM_MC_END();
1187 }
1188 }
1189}
1190
1191
1192/**
1193 * VEX opcode map \#3.
1194 *
1195 * @sa g_apfnThreeByte0f3a
1196 */
1197const PFNIEMOP g_apfnVexMap3[] =
1198{
1199 /* no prefix, 066h prefix f3h prefix, f2h prefix */
1200 /* 0x00 */ iemOp_InvalidNeedRMImm8, iemOp_vpermq_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1201 /* 0x01 */ iemOp_InvalidNeedRMImm8, iemOp_vpermqd_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1202 /* 0x02 */ iemOp_InvalidNeedRMImm8, iemOp_vpblendd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1203 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1204 /* 0x04 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1205 /* 0x05 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1206 /* 0x06 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1207 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1208 /* 0x08 */ iemOp_InvalidNeedRMImm8, iemOp_vroundps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1209 /* 0x09 */ iemOp_InvalidNeedRMImm8, iemOp_vroundpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1210 /* 0x0a */ iemOp_InvalidNeedRMImm8, iemOp_vroundss_Vss_Wss_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1211 /* 0x0b */ iemOp_InvalidNeedRMImm8, iemOp_vroundsd_Vsd_Wsd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1212 /* 0x0c */ iemOp_InvalidNeedRMImm8, iemOp_vblendps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1213 /* 0x0d */ iemOp_InvalidNeedRMImm8, iemOp_vblendpd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1214 /* 0x0e */ iemOp_InvalidNeedRMImm8, iemOp_vpblendw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1215 /* 0x0f */ iemOp_InvalidNeedRMImm8, iemOp_vpalignr_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1216
1217 /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1218 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1219 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1220 /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1221 /* 0x14 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrb_RdMb_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1222 /* 0x15 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrw_RdMw_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1223 /* 0x16 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrd_q_RdMw_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1224 /* 0x17 */ iemOp_InvalidNeedRMImm8, iemOp_vextractps_Ed_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1225 /* 0x18 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1226 /* 0x19 */ iemOp_InvalidNeedRMImm8, iemOp_vextractf128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1227 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1228 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1229 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1230 /* 0x1d */ iemOp_InvalidNeedRMImm8, iemOp_vcvtps2ph_Wx_Vx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1231 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1232 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1233
1234 /* 0x20 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1235 /* 0x21 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1236 /* 0x22 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1237 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1238 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1239 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1240 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1241 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1242 /* 0x28 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1243 /* 0x29 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1244 /* 0x2a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1245 /* 0x2b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1246 /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1247 /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1248 /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1249 /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1250
1251 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1252 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1253 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1254 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1255 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1256 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1257 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1258 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1259 /* 0x38 */ iemOp_InvalidNeedRMImm8, iemOp_vinserti128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1260 /* 0x39 */ iemOp_InvalidNeedRMImm8, iemOp_vextracti128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1261 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1262 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1263 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1264 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1265 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1266 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1267
1268 /* 0x40 */ iemOp_InvalidNeedRMImm8, iemOp_vdpps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1269 /* 0x41 */ iemOp_InvalidNeedRMImm8, iemOp_vdppd_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1270 /* 0x42 */ iemOp_InvalidNeedRMImm8, iemOp_vmpsadbw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1271 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1272 /* 0x44 */ iemOp_InvalidNeedRMImm8, iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1273 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1274 /* 0x46 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1275 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1276 /* 0x48 */ iemOp_InvalidNeedRMImm8, iemOp_vperlmilzz2ps_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1277 /* 0x49 */ iemOp_InvalidNeedRMImm8, iemOp_vperlmilzz2pd_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1278 /* 0x4a */ iemOp_InvalidNeedRMImm8, iemOp_vblendvps_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1279 /* 0x4b */ iemOp_InvalidNeedRMImm8, iemOp_vblendvpd_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1280 /* 0x4c */ iemOp_InvalidNeedRMImm8, iemOp_vpblendvb_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1281 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1282 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1283 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1284
1285 /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1286 /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1287 /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1288 /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1289 /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1290 /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1291 /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1292 /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1293 /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1294 /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1295 /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1296 /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1297 /* 0x5c */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1298 /* 0x5d */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1299 /* 0x5e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1300 /* 0x5f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1301
1302 /* 0x60 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1303 /* 0x61 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1304 /* 0x62 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1305 /* 0x63 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1306 /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1307 /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1308 /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1309 /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1310 /* 0x68 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1311 /* 0x69 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1312 /* 0x6a */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1313 /* 0x6b */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1314 /* 0x6c */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1315 /* 0x6d */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1316 /* 0x6e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1317 /* 0x6f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1318
1319 /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1320 /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1321 /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1322 /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1323 /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1324 /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1325 /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1326 /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1327 /* 0x78 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1328 /* 0x79 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1329 /* 0x7a */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1330 /* 0x7b */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1331 /* 0x7c */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1332 /* 0x7d */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1333 /* 0x7e */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1334 /* 0x7f */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1335
1336 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1337 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1338 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1339 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1340 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1341 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1342 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1343 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1344 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1345 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1346 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1347 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1348 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1349 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1350 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1351 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1352
1353 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1354 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1355 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1356 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1357 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1358 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1359 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1360 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1361 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1362 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1363 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1364 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1365 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1366 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1367 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1368 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1369
1370 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1371 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1372 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1373 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1374 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1375 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1376 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1377 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1378 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1379 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1380 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1381 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1382 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1383 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1384 /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1385 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1386
1387 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1388 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1389 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1390 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1391 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1392 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1393 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1394 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1395 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1396 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1397 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1398 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1399 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1400 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1401 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1402 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1403
1404 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1405 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1406 /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1407 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1408 /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1409 /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1410 /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1411 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1412 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1413 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1414 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1415 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1416 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1417 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1418 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1419 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1420
1421 /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1422 /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1423 /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1424 /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1425 /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1426 /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1427 /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1428 /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1429 /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1430 /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1431 /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1432 /* 0xdb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1433 /* 0xdc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1434 /* 0xdd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1435 /* 0xde */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1436 /* 0xdf */ iemOp_vaeskeygen_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1437
1438 /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1439 /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1440 /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1441 /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1442 /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1443 /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1444 /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1445 /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1446 /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1447 /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1448 /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1449 /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1450 /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1451 /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1452 /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1453 /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1454
1455 /* 0xf0 */ iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_rorx_Gy_Ey_Ib,
1456 /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1457 /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1458 /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1459 /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1460 /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1461 /* 0xf6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1462 /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1463 /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1464 /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1465 /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1466 /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1467 /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1468 /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1469 /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1470 /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1471};
1472AssertCompile(RT_ELEMENTS(g_apfnVexMap3) == 1024);
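/* 1024 = 256 opcode bytes x 4 mandatory-prefix columns (none, 0x66, 0xF3, 0xF2).
   The decoder presumably indexes this as (opcode * 4) + prefix column; the actual
   lookup lives in the common decoder code, not in this file. */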
1473
1474/** @} */
1475