VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap3.cpp.h @ 104074

Last change on this file since 104074 was 104059, checked in by vboxsync, 11 months ago

VMM/IEM: Implement vextracti128/vextractf128 instruction emulation, bugref:9898

1/* $Id: IEMAllInstVexMap3.cpp.h 104059 2024-03-26 13:43:01Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation, 0x0f 0x3a map.
4 *
5 * @remarks IEMAllInstThree0f3a.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 3
33 * @{
34 */
35
36/**
37 * Common worker for AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128, imm8
39 * - vpxxx ymm0, ymm1, ymm2/mem256, imm8
40 *
41 * Takes function table for function w/o implicit state parameter.
42 *
43 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
44 */
45FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
46{
47 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
48 if (IEM_IS_MODRM_REG_MODE(bRm))
49 {
50 /*
51 * Register, register.
52 */
53 if (pVCpu->iem.s.uVexLength)
54 {
55 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
56 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
57 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
58 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
59 IEM_MC_PREPARE_AVX_USAGE();
60
61 IEM_MC_LOCAL(RTUINT256U, uDst);
62 IEM_MC_LOCAL(RTUINT256U, uSrc1);
63 IEM_MC_LOCAL(RTUINT256U, uSrc2);
64 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
65 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
66 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
67 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
68 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
69 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
70 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
71 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
72 IEM_MC_ADVANCE_RIP_AND_FINISH();
73 IEM_MC_END();
74 }
75 else
76 {
77 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
78 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
79 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
80 IEM_MC_ARG(PRTUINT128U, puDst, 0);
81 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
82 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
83 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
84 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
85 IEM_MC_PREPARE_AVX_USAGE();
86 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
87 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
88 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
89 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
90 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
91 IEM_MC_ADVANCE_RIP_AND_FINISH();
92 IEM_MC_END();
93 }
94 }
95 else
96 {
97 /*
98 * Register, memory.
99 */
100 if (pVCpu->iem.s.uVexLength)
101 {
102 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
103 IEM_MC_LOCAL(RTUINT256U, uDst);
104 IEM_MC_LOCAL(RTUINT256U, uSrc1);
105 IEM_MC_LOCAL(RTUINT256U, uSrc2);
106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
107 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
108 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
109 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
110
111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
112 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
113 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
114 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
115 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
116 IEM_MC_PREPARE_AVX_USAGE();
117
118 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
119 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
120 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
121 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
122
123 IEM_MC_ADVANCE_RIP_AND_FINISH();
124 IEM_MC_END();
125 }
126 else
127 {
128 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
129 IEM_MC_LOCAL(RTUINT128U, uSrc2);
130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
131 IEM_MC_ARG(PRTUINT128U, puDst, 0);
132 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
133 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
134
135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
136 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
137 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
138 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
139 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
140 IEM_MC_PREPARE_AVX_USAGE();
141
142 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
143 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
144 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
145 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
146 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
147
148 IEM_MC_ADVANCE_RIP_AND_FINISH();
149 IEM_MC_END();
150 }
151 }
152}
153
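/*
 * Illustrative sketch (not part of the original file): the worker above only
 * decodes the instruction and marshals operands; the arithmetic lives in the
 * 4-argument helpers the pImpl table points at (pfnU128/pfnU256).  Below is a
 * self-contained plain-C model, using stdint types instead of the RTUINT128U
 * unions, of what such a helper computes for a representative caller,
 * vpblendd on 128-bit operands: immediate bit i selects dword i from the
 * second source instead of the first.  The function name is hypothetical.
 */
#include <stdint.h>

static void ExampleBlendDwords128(uint32_t aDst[4], const uint32_t aSrc1[4],
                                  const uint32_t aSrc2[4], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)                    /* one immediate bit per dword */
        aDst[i] = (bImm & (1u << i)) ? aSrc2[i] : aSrc1[i];
}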
154
155/**
156 * Common worker for AVX instructions on the forms:
157 * - vpermilps/d xmm0, xmm1/mem128, imm8
158 * - vpermilps/d ymm0, ymm1/mem256, imm8
159 *
160 * Takes function table for function w/o implicit state parameter.
161 *
162 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
163 */
164FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF2IMM8, pImpl)
165{
166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
167 if (IEM_IS_MODRM_REG_MODE(bRm))
168 {
169 /*
170 * Register, register.
171 */
172 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
173 if (pVCpu->iem.s.uVexLength)
174 {
175 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
176 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
177 IEM_MC_LOCAL(RTUINT256U, uDst);
178 IEM_MC_LOCAL(RTUINT256U, uSrc);
179 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
180 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
181 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
182 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
183 IEM_MC_PREPARE_AVX_USAGE();
184 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
186 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
187 IEM_MC_ADVANCE_RIP_AND_FINISH();
188 IEM_MC_END();
189 }
190 else
191 {
192 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
193 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
194 IEM_MC_ARG(PRTUINT128U, puDst, 0);
195 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
196 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
197 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
198 IEM_MC_PREPARE_AVX_USAGE();
199 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
200 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
201 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
202 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
203 IEM_MC_ADVANCE_RIP_AND_FINISH();
204 IEM_MC_END();
205 }
206 }
207 else
208 {
209 /*
210 * Register, memory.
211 */
212 if (pVCpu->iem.s.uVexLength)
213 {
214 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
215 IEM_MC_LOCAL(RTUINT256U, uDst);
216 IEM_MC_LOCAL(RTUINT256U, uSrc);
217 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
218 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
219 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
220
221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
222 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
223 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
224 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
225 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
226 IEM_MC_PREPARE_AVX_USAGE();
227
228 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
229 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
230 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
231
232 IEM_MC_ADVANCE_RIP_AND_FINISH();
233 IEM_MC_END();
234 }
235 else
236 {
237 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
238 IEM_MC_LOCAL(RTUINT128U, uSrc);
239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
240 IEM_MC_ARG(PRTUINT128U, puDst, 0);
241 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
242
243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
244 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
245 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
246 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
247 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
248 IEM_MC_PREPARE_AVX_USAGE();
249
250 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
251 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
252 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
253 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
254
255 IEM_MC_ADVANCE_RIP_AND_FINISH();
256 IEM_MC_END();
257 }
258 }
259}
260
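/*
 * Illustrative sketch (not part of the original file): for the imm8 form of
 * vpermilps routed through the worker above, each destination dword is picked
 * from within the same 128-bit lane of the source, two immediate bits per
 * element.  Self-contained plain-C model of one lane; the name is hypothetical.
 */
#include <stdint.h>

static void ExamplePermilPsLane(uint32_t aDst[4], const uint32_t aSrc[4], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)            /* bits 2i+1:2i select element i */
        aDst[i] = aSrc[(bImm >> (2 * i)) & 3];
}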
261
262/**
263 * Common worker for AVX instructions on the forms:
264 * - vblendps/d xmm0, xmm1, xmm2/mem128, imm8
265 * - vblendps/d ymm0, ymm1, ymm2/mem256, imm8
266 *
267 * Takes function table for function w/o implicit state parameter.
268 *
269 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
270 */
271FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
272{
273 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
274 if (IEM_IS_MODRM_REG_MODE(bRm))
275 {
276 /*
277 * Register, register.
278 */
279 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
280 if (pVCpu->iem.s.uVexLength)
281 {
282 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
283 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
284 IEM_MC_LOCAL(RTUINT256U, uDst);
285 IEM_MC_LOCAL(RTUINT256U, uSrc1);
286 IEM_MC_LOCAL(RTUINT256U, uSrc2);
287 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
288 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
289 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
290 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
291 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
292 IEM_MC_PREPARE_AVX_USAGE();
293 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
294 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
295 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
296 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
297 IEM_MC_ADVANCE_RIP_AND_FINISH();
298 IEM_MC_END();
299 }
300 else
301 {
302 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
303 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
304 IEM_MC_ARG(PRTUINT128U, puDst, 0);
305 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
306 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
307 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
308 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
309 IEM_MC_PREPARE_AVX_USAGE();
310 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
311 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
312 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
313 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
314 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
315 IEM_MC_ADVANCE_RIP_AND_FINISH();
316 IEM_MC_END();
317 }
318 }
319 else
320 {
321 /*
322 * Register, memory.
323 */
324 if (pVCpu->iem.s.uVexLength)
325 {
326 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
327 IEM_MC_LOCAL(RTUINT256U, uDst);
328 IEM_MC_LOCAL(RTUINT256U, uSrc1);
329 IEM_MC_LOCAL(RTUINT256U, uSrc2);
330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
331 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
332 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
333 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
334
335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
336 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
337 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
338 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
339 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
340 IEM_MC_PREPARE_AVX_USAGE();
341
342 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
343 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
344 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
345 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
346
347 IEM_MC_ADVANCE_RIP_AND_FINISH();
348 IEM_MC_END();
349 }
350 else
351 {
352 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
353 IEM_MC_LOCAL(RTUINT128U, uSrc2);
354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
355 IEM_MC_ARG(PRTUINT128U, puDst, 0);
356 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
357 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
358
359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
360 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
361 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
362 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
363 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
364 IEM_MC_PREPARE_AVX_USAGE();
365
366 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
367 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
368 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
369 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
370 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
371
372 IEM_MC_ADVANCE_RIP_AND_FINISH();
373 IEM_MC_END();
374 }
375 }
376}
377
378
379/** Opcode VEX.66.0F3A 0x00. */
380FNIEMOP_STUB(iemOp_vpermq_Vqq_Wqq_Ib);
381/** Opcode VEX.66.0F3A 0x01. */
382FNIEMOP_STUB(iemOp_vpermqd_Vqq_Wqq_Ib);
383
384
385/** Opcode VEX.66.0F3A 0x02.
386 * AVX2,AVX2 */
387FNIEMOP_DEF(iemOp_vpblendd_Vx_Hx_Wx_Ib)
388{
389 IEMOP_MNEMONIC4(VEX_RVMI, VPBLENDD, vpblendd, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
390 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendd);
391 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
392}
393
394
395/* Opcode VEX.66.0F3A 0x03 - invalid */
396
397
398/** Opcode VEX.66.0F3A 0x04.
399 * AVX,AVX */
400FNIEMOP_DEF(iemOp_vpermilps_Vx_Wx_Ib)
401{
402 IEMOP_MNEMONIC3(VEX_RMI, VPERMILPS, vpermilps, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
403 IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilps);
404 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
405}
406
407
408/** Opcode VEX.66.0F3A 0x05.
409 * AVX,AVX */
410FNIEMOP_DEF(iemOp_vpermilpd_Vx_Wx_Ib)
411{
412 IEMOP_MNEMONIC3(VEX_RMI, VPERMILPD, vpermilpd, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
413 IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilpd);
414 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
415}
416
417
418/** Opcode VEX.66.0F3A 0x06 (vex only) */
419FNIEMOP_DEF(iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib)
420{
421 IEMOP_MNEMONIC4(VEX_RVMI, VPERM2F128, vperm2f128, Vqq_WO, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0);
422 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
423 if (IEM_IS_MODRM_REG_MODE(bRm))
424 {
425 /*
426 * Register, register.
427 */
428 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
429 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
430 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
431 IEM_MC_LOCAL(RTUINT256U, uDst);
432 IEM_MC_LOCAL(RTUINT256U, uSrc1);
433 IEM_MC_LOCAL(RTUINT256U, uSrc2);
434 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
435 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
436 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
437 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
438 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
439 IEM_MC_PREPARE_AVX_USAGE();
440 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
441 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
442 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
443 puDst, puSrc1, puSrc2, bImmArg);
444 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
445 IEM_MC_ADVANCE_RIP_AND_FINISH();
446 IEM_MC_END();
447 }
448 else
449 {
450 /*
451 * Register, memory.
452 */
453 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
454 IEM_MC_LOCAL(RTUINT256U, uDst);
455 IEM_MC_LOCAL(RTUINT256U, uSrc1);
456 IEM_MC_LOCAL(RTUINT256U, uSrc2);
457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
458 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
459 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
460 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
461
462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
463 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
464 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
465 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
466 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
467 IEM_MC_PREPARE_AVX_USAGE();
468
469 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
470 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
471 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
472 puDst, puSrc1, puSrc2, bImmArg);
473 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
474
475 IEM_MC_ADVANCE_RIP_AND_FINISH();
476 IEM_MC_END();
477 }
478}
479
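/*
 * Illustrative sketch (not part of the original file): vperm2f128 assembles
 * each 128-bit lane of the result from one of the four input lanes
 * {src1.lo, src1.hi, src2.lo, src2.hi}, selected by imm[1:0] for the low lane
 * and imm[5:4] for the high lane; imm bit 3 (resp. bit 7) zeroes the low
 * (resp. high) lane instead.  Self-contained plain-C model with a 256-bit
 * value held as four little-endian qwords; assumes the destination does not
 * alias the sources.  The name is hypothetical.
 */
#include <stdint.h>
#include <string.h>

static void ExamplePerm2x128(uint64_t aDst[4], const uint64_t aSrc1[4],
                             const uint64_t aSrc2[4], uint8_t bImm)
{
    const uint64_t *apLane[4] = { &aSrc1[0], &aSrc1[2], &aSrc2[0], &aSrc2[2] };
    for (unsigned iLane = 0; iLane < 2; iLane++)
    {
        uint8_t const bSel = (uint8_t)(bImm >> (4 * iLane));
        if (bSel & 8)                                        /* zeroing bit wins */
            memset(&aDst[2 * iLane], 0, 2 * sizeof(uint64_t));
        else
            memcpy(&aDst[2 * iLane], apLane[bSel & 3], 2 * sizeof(uint64_t));
    }
}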
480
481/* Opcode VEX.66.0F3A 0x07 - invalid */
482/** Opcode VEX.66.0F3A 0x08. */
483FNIEMOP_STUB(iemOp_vroundps_Vx_Wx_Ib);
484/** Opcode VEX.66.0F3A 0x09. */
485FNIEMOP_STUB(iemOp_vroundpd_Vx_Wx_Ib);
486/** Opcode VEX.66.0F3A 0x0a. */
487FNIEMOP_STUB(iemOp_vroundss_Vss_Wss_Ib);
488/** Opcode VEX.66.0F3A 0x0b. */
489FNIEMOP_STUB(iemOp_vroundsd_Vsd_Wsd_Ib);
490
491
492/** Opcode VEX.66.0F3A 0x0c.
493 * AVX,AVX */
494FNIEMOP_DEF(iemOp_vblendps_Vx_Hx_Wx_Ib)
495{
496 IEMOP_MNEMONIC4(VEX_RVMI, VBLENDPS, vblendps, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
497 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendps);
498 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
499}
500
501
502/** Opcode VEX.66.0F3A 0x0d.
503 * AVX,AVX */
504FNIEMOP_DEF(iemOp_vblendpd_Vx_Hx_Wx_Ib)
505{
506 IEMOP_MNEMONIC4(VEX_RVMI, VBLENDPD, vblendpd, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
507 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendpd);
508 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
509}
510
511
512/** Opcode VEX.66.0F3A 0x0e.
513 * AVX,AVX2 */
514FNIEMOP_DEF(iemOp_vpblendw_Vx_Hx_Wx_Ib)
515{
516 IEMOP_MNEMONIC4(VEX_RVMI, VPBLENDW, vpblendw, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
517 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendw);
518 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
519}
520
521
522/** Opcode VEX.0F3A 0x0f - invalid. */
523
524
525/** Opcode VEX.66.0F3A 0x0f.
526 * AVX,AVX2 */
527FNIEMOP_DEF(iemOp_vpalignr_Vx_Hx_Wx_Ib)
528{
529 IEMOP_MNEMONIC4(VEX_RVMI, VPALIGNR, vpalignr, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
530 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpalignr);
531 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
532}
533
534
535/* Opcode VEX.66.0F3A 0x10 - invalid */
536/* Opcode VEX.66.0F3A 0x11 - invalid */
537/* Opcode VEX.66.0F3A 0x12 - invalid */
538/* Opcode VEX.66.0F3A 0x13 - invalid */
539
540
541/** Opcode VEX.66.0F3A 0x14 - vpextrb Eb, Vdq, Ib */
542FNIEMOP_DEF(iemOp_vpextrb_Eb_Vdq_Ib)
543{
544 IEMOP_MNEMONIC3(VEX_MRI, VPEXTRB, vpextrb, Eb, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO | IEMOPHINT_IGNORES_REXW);
545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
546 if (IEM_IS_MODRM_REG_MODE(bRm))
547 {
548 /*
549 * greg32, XMM, imm8.
550 */
551 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
552 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
553 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
554 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
555 IEM_MC_PREPARE_AVX_USAGE();
556
557 IEM_MC_LOCAL(uint8_t, uValue);
558 IEM_MC_FETCH_XREG_U8(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/);
559 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
560 IEM_MC_ADVANCE_RIP_AND_FINISH();
561 IEM_MC_END();
562 }
563 else
564 {
565 /*
566 * [mem8], XMM, imm8.
567 */
568 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
569 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
572
573 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
574 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
575 IEM_MC_PREPARE_AVX_USAGE();
576
577 IEM_MC_LOCAL(uint8_t, uValue);
578 IEM_MC_FETCH_XREG_U8(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/);
579 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
580 IEM_MC_ADVANCE_RIP_AND_FINISH();
581 IEM_MC_END();
582 }
583}
584
585
586/** Opcode VEX.66.0F3A 0x15 - vpextrw Ew, Vdq, Ib */
587FNIEMOP_DEF(iemOp_vpextrw_Ew_Vdq_Ib)
588{
589 /** @todo testcase: check that this ignores VEX.W. */
590 IEMOP_MNEMONIC3(VEX_MRI, VPEXTRW, vpextrw, Ew_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO | IEMOPHINT_IGNORES_REXW);
591 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
592 if (IEM_IS_MODRM_REG_MODE(bRm))
593 {
594 /*
595 * greg32, XMM, imm8.
596 */
597 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
598 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
599 IEM_MC_LOCAL(uint16_t, uValue);
600
601 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
602 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
603 IEM_MC_PREPARE_AVX_USAGE();
604
605 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7);
606 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
607 IEM_MC_ADVANCE_RIP_AND_FINISH();
608 IEM_MC_END();
609 }
610 else
611 {
612 /*
613 * [mem16], XMM, imm8.
614 */
615 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
616 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
617 IEM_MC_LOCAL(uint16_t, uValue);
618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
620
621 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
623 IEM_MC_PREPARE_AVX_USAGE();
624
625 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7);
626 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
627 IEM_MC_ADVANCE_RIP_AND_FINISH();
628 IEM_MC_END();
629 }
630}
631
632
633/** Opcode VEX.66.0F3A 0x16 - vpextrd / vpextrq Eq / Ey, Vdq, Ib */
634FNIEMOP_DEF(iemOp_vpextrd_q_Ey_Vdq_Ib)
635{
636 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
637 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
638 {
639 IEMOP_MNEMONIC3(VEX_MRI, VPEXTRQ, vpextrq, Eq_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_W_ONE);
640 if (IEM_IS_MODRM_REG_MODE(bRm))
641 {
642 /*
643 * greg64, XMM, imm8.
644 */
645 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
646 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
647 IEM_MC_LOCAL(uint64_t, uValue);
648
649 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
650 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
651 IEM_MC_PREPARE_AVX_USAGE();
652
653 IEM_MC_FETCH_XREG_U64(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
654 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
655 IEM_MC_ADVANCE_RIP_AND_FINISH();
656 IEM_MC_END();
657 }
658 else
659 {
660 /*
661 * [mem64], XMM, imm8.
662 */
663 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
664 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
665 IEM_MC_LOCAL(uint64_t, uValue);
666 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
668
669 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
670 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
671 IEM_MC_PREPARE_AVX_USAGE();
672
673 IEM_MC_FETCH_XREG_U64(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
674 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
675 IEM_MC_ADVANCE_RIP_AND_FINISH();
676 IEM_MC_END();
677 }
678 }
679 else
680 {
681 /**
682 * @opdone
683 */
684 IEMOP_MNEMONIC3(VEX_MRI, VPEXTRD, vpextrd, Ey_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_W_ZERO);
685 if (IEM_IS_MODRM_REG_MODE(bRm))
686 {
687 /*
688 * greg32, XMM, imm8.
689 */
690 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
691 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
692 IEM_MC_LOCAL(uint32_t, uValue);
693
694 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
695 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
696 IEM_MC_PREPARE_AVX_USAGE();
697
698 IEM_MC_FETCH_XREG_U32(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3);
699 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
700 IEM_MC_ADVANCE_RIP_AND_FINISH();
701 IEM_MC_END();
702 }
703 else
704 {
705 /*
706 * [mem32], XMM, imm8.
707 */
708 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
709 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
710 IEM_MC_LOCAL(uint32_t, uValue);
711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
713
714 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
715 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
716 IEM_MC_PREPARE_AVX_USAGE();
717
718 IEM_MC_FETCH_XREG_U32(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3);
719 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
720 IEM_MC_ADVANCE_RIP_AND_FINISH();
721 IEM_MC_END();
722 }
723 }
724}
725
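/*
 * Illustrative sketch (not part of the original file): vpextrd/vpextrq above
 * copy one element of the XMM source into a general register or memory; the
 * low immediate bits select the element and VEX.W selects dword vs. qword
 * width.  Self-contained plain-C model of the dword case; the name is
 * hypothetical.
 */
#include <stdint.h>

static uint32_t ExampleExtractDword(const uint32_t aSrcXmm[4], uint8_t bImm)
{
    return aSrcXmm[bImm & 3];   /* only the two low immediate bits matter */
}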
726
727/** Opcode VEX.66.0F3A 0x17. */
728FNIEMOP_STUB(iemOp_vextractps_Ed_Vdq_Ib);
729
730
731/** Opcode VEX.66.0F3A 0x18 (vex only). */
732FNIEMOP_DEF(iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib)
733{
734 //IEMOP_MNEMONIC4(VEX_RMI, VINSERTF128, vinsertf128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
736 if (IEM_IS_MODRM_REG_MODE(bRm))
737 {
738 /*
739 * Register, register.
740 */
741 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
742 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
743 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
744 IEM_MC_LOCAL(RTUINT128U, uSrc);
745
746 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
747 IEM_MC_PREPARE_AVX_USAGE();
748
749 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
750 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
751 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
752
753 IEM_MC_ADVANCE_RIP_AND_FINISH();
754 IEM_MC_END();
755 }
756 else
757 {
758 /*
759 * Register, memory.
760 */
761 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
762 IEM_MC_LOCAL(RTUINT128U, uSrc);
763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
764
765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
766 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
767 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
768 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
769 IEM_MC_PREPARE_AVX_USAGE();
770
771 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
772 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
773 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
774
775 IEM_MC_ADVANCE_RIP_AND_FINISH();
776 IEM_MC_END();
777 }
778}
779
780
781/** Opcode VEX.66.0F3A 0x19 (vex only). */
782FNIEMOP_DEF(iemOp_vextractf128_Wdq_Vqq_Ib)
783{
784 IEMOP_MNEMONIC3(VEX_MRI, VEXTRACTF128, vextractf128, Wdq, Vqq, Ib, DISOPTYPE_HARMLESS, 0);
785 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
786 if (IEM_IS_MODRM_REG_MODE(bRm))
787 {
788 /*
789 * Register, register.
790 */
791 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
792 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
793 IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
794
795 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
796 IEM_MC_PREPARE_AVX_USAGE();
797
798 IEM_MC_LOCAL(RTUINT128U, uDst);
799 IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
800 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_RM(pVCpu, bRm), uDst);
801
802 IEM_MC_ADVANCE_RIP_AND_FINISH();
803 IEM_MC_END();
804 }
805 else
806 {
807 /*
808 * Register, memory.
809 */
810 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
812
813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
814 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
815 IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
816 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
817 IEM_MC_PREPARE_AVX_USAGE();
818
819 IEM_MC_LOCAL(RTUINT128U, uDst);
820 IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
821 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uDst);
822
823 IEM_MC_ADVANCE_RIP_AND_FINISH();
824 IEM_MC_END();
825 }
826}
827
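/*
 * Illustrative sketch (not part of the original file): vinsertf128 and
 * vextractf128 above simply move one 128-bit lane between an XMM-sized
 * operand and a YMM register; only bit 0 of the immediate is used and it
 * selects the low or high lane.  Self-contained plain-C model of the extract
 * direction; the name is hypothetical.
 */
#include <stdint.h>
#include <string.h>

static void ExampleExtract128(uint64_t aDst[2], const uint64_t aSrcYmm[4], uint8_t bImm)
{
    memcpy(aDst, &aSrcYmm[(bImm & 1) ? 2 : 0], 2 * sizeof(uint64_t));
}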
828
829/* Opcode VEX.66.0F3A 0x1a - invalid */
830/* Opcode VEX.66.0F3A 0x1b - invalid */
831/* Opcode VEX.66.0F3A 0x1c - invalid */
832/** Opcode VEX.66.0F3A 0x1d (vex only). */
833FNIEMOP_STUB(iemOp_vcvtps2ph_Wx_Vx_Ib);
834/* Opcode VEX.66.0F3A 0x1e - invalid */
835/* Opcode VEX.66.0F3A 0x1f - invalid */
836
837
838/** Opcode VEX.66.0F3A 0x20. */
839FNIEMOP_STUB(iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib);
840/** Opcode VEX.66.0F3A 0x21, */
841FNIEMOP_STUB(iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib);
842/** Opcode VEX.66.0F3A 0x22. */
843FNIEMOP_STUB(iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib);
844/* Opcode VEX.66.0F3A 0x23 - invalid */
845/* Opcode VEX.66.0F3A 0x24 - invalid */
846/* Opcode VEX.66.0F3A 0x25 - invalid */
847/* Opcode VEX.66.0F3A 0x26 - invalid */
848/* Opcode VEX.66.0F3A 0x27 - invalid */
849/* Opcode VEX.66.0F3A 0x28 - invalid */
850/* Opcode VEX.66.0F3A 0x29 - invalid */
851/* Opcode VEX.66.0F3A 0x2a - invalid */
852/* Opcode VEX.66.0F3A 0x2b - invalid */
853/* Opcode VEX.66.0F3A 0x2c - invalid */
854/* Opcode VEX.66.0F3A 0x2d - invalid */
855/* Opcode VEX.66.0F3A 0x2e - invalid */
856/* Opcode VEX.66.0F3A 0x2f - invalid */
857
858
859/* Opcode VEX.66.0F3A 0x30 - invalid */
860/* Opcode VEX.66.0F3A 0x31 - invalid */
861/* Opcode VEX.66.0F3A 0x32 - invalid */
862/* Opcode VEX.66.0F3A 0x33 - invalid */
863/* Opcode VEX.66.0F3A 0x34 - invalid */
864/* Opcode VEX.66.0F3A 0x35 - invalid */
865/* Opcode VEX.66.0F3A 0x36 - invalid */
866/* Opcode VEX.66.0F3A 0x37 - invalid */
867
868
869/** Opcode VEX.66.0F3A 0x38 (vex only). */
870FNIEMOP_DEF(iemOp_vinserti128_Vqq_Hqq_Wqq_Ib)
871{
872 //IEMOP_MNEMONIC4(VEX_RMI, VINSERTI128, vinserti128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
873 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
874 if (IEM_IS_MODRM_REG_MODE(bRm))
875 {
876 /*
877 * Register, register.
878 */
879 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
880 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
881 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
882 IEM_MC_LOCAL(RTUINT128U, uSrc);
883
884 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
885 IEM_MC_PREPARE_AVX_USAGE();
886
887 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
888 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
889 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
890
891 IEM_MC_ADVANCE_RIP_AND_FINISH();
892 IEM_MC_END();
893 }
894 else
895 {
896 /*
897 * Register, memory.
898 */
899 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
900 IEM_MC_LOCAL(RTUINT128U, uSrc);
901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
902
903 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
904 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
905 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
906 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
907 IEM_MC_PREPARE_AVX_USAGE();
908
909 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
910 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
911 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
912
913 IEM_MC_ADVANCE_RIP_AND_FINISH();
914 IEM_MC_END();
915 }
916}
917
918
919/** Opcode VEX.66.0F3A 0x39 (vex only). */
920FNIEMOP_DEF(iemOp_vextracti128_Wdq_Vqq_Ib)
921{
922 IEMOP_MNEMONIC3(VEX_MRI, VEXTRACTI128, vextracti128, Wdq, Vqq, Ib, DISOPTYPE_HARMLESS, 0);
923 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
924 if (IEM_IS_MODRM_REG_MODE(bRm))
925 {
926 /*
927 * Register, register.
928 */
929 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
930 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
931 IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
932
933 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
934 IEM_MC_PREPARE_AVX_USAGE();
935
936 IEM_MC_LOCAL(RTUINT128U, uDst);
937 IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
938 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_RM(pVCpu, bRm), uDst);
939
940 IEM_MC_ADVANCE_RIP_AND_FINISH();
941 IEM_MC_END();
942 }
943 else
944 {
945 /*
946 * Register, memory.
947 */
948 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
950
951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
952 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
953 IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
954 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
955 IEM_MC_PREPARE_AVX_USAGE();
956
957 IEM_MC_LOCAL(RTUINT128U, uDst);
958 IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
959 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uDst);
960
961 IEM_MC_ADVANCE_RIP_AND_FINISH();
962 IEM_MC_END();
963 }
964}
965
966
967/* Opcode VEX.66.0F3A 0x3a - invalid */
968/* Opcode VEX.66.0F3A 0x3b - invalid */
969/* Opcode VEX.66.0F3A 0x3c - invalid */
970/* Opcode VEX.66.0F3A 0x3d - invalid */
971/* Opcode VEX.66.0F3A 0x3e - invalid */
972/* Opcode VEX.66.0F3A 0x3f - invalid */
973
974
975/** Opcode VEX.66.0F3A 0x40. */
976FNIEMOP_STUB(iemOp_vdpps_Vx_Hx_Wx_Ib);
977/** Opcode VEX.66.0F3A 0x41, */
978FNIEMOP_STUB(iemOp_vdppd_Vdq_Hdq_Wdq_Ib);
979
980
981/** Opcode VEX.66.0F3A 0x42. */
982FNIEMOP_DEF(iemOp_vmpsadbw_Vx_Hx_Wx_Ib)
983{
984 IEMOP_MNEMONIC4(VEX_RVMI, VMPSADBW, vmpsadbw, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
985 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vmpsadbw);
986 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
987}
988
989
990/* Opcode VEX.66.0F3A 0x43 - invalid */
991
992
993/** Opcode VEX.66.0F3A 0x44. */
994FNIEMOP_DEF(iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib)
995{
996 IEMOP_MNEMONIC4(VEX_RVMI, VPCLMULQDQ, vpclmulqdq, Vdq_WO, Hdq, Wdq, Id, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
997 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
998 if (IEM_IS_MODRM_REG_MODE(bRm))
999 {
1000 /*
1001 * Register, register.
1002 */
1003 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1004 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1005 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
1006 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1007 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1008 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
1009 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1010 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1011 IEM_MC_PREPARE_AVX_USAGE();
1012 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1013 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1014 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1015 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
1016 puDst, puSrc1, puSrc2, bImmArg);
1017 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1018 IEM_MC_ADVANCE_RIP_AND_FINISH();
1019 IEM_MC_END();
1020 }
1021 else
1022 {
1023 /*
1024 * Register, memory.
1025 */
1026 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1027 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1028 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1029 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1030 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1031 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1032
1033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1034 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1035 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1036 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
1037 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1038 IEM_MC_PREPARE_AVX_USAGE();
1039
1040 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1041 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1042 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1043 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
1044 puDst, puSrc1, puSrc2, bImmArg);
1045 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1046
1047 IEM_MC_ADVANCE_RIP_AND_FINISH();
1048 IEM_MC_END();
1049 }
1050}
1051
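/*
 * Illustrative sketch (not part of the original file): vpclmulqdq multiplies
 * one qword from each source, selected by immediate bits 0 and 4, as GF(2)
 * polynomials (no carries), producing a 128-bit product.  Self-contained
 * plain-C model of the 64x64 carryless multiply; the name is hypothetical.
 */
#include <stdint.h>

static void ExampleClMul64(uint64_t aDst[2], uint64_t uA, uint64_t uB)
{
    aDst[0] = aDst[1] = 0;
    for (unsigned i = 0; i < 64; i++)
        if (uB & (UINT64_C(1) << i))
        {
            aDst[0] ^= uA << i;                 /* low 64 bits of uA * x^i  */
            if (i)
                aDst[1] ^= uA >> (64 - i);      /* bits spilling above 2^64 */
        }
}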
1052
1053/* Opcode VEX.66.0F3A 0x45 - invalid */
1054
1055
1056/** Opcode VEX.66.0F3A 0x46 (vex only) */
1057FNIEMOP_DEF(iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib)
1058{
1059 IEMOP_MNEMONIC4(VEX_RVMI, VPERM2I128, vperm2i128, Vqq_WO, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ONE);
1060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1061 if (IEM_IS_MODRM_REG_MODE(bRm))
1062 {
1063 /*
1064 * Register, register.
1065 */
1066 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1067 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1068 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
1069 IEM_MC_LOCAL(RTUINT256U, uDst);
1070 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1071 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1072 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1073 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1074 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1075 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
1076 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1077 IEM_MC_PREPARE_AVX_USAGE();
1078 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1079 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1080 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
1081 puDst, puSrc1, puSrc2, bImmArg);
1082 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1083 IEM_MC_ADVANCE_RIP_AND_FINISH();
1084 IEM_MC_END();
1085 }
1086 else
1087 {
1088 /*
1089 * Register, memory.
1090 */
1091 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1092 IEM_MC_LOCAL(RTUINT256U, uDst);
1093 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1094 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1095 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1096 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1097 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1098 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1099
1100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1101 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1102 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
1103 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
1104 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1105 IEM_MC_PREPARE_AVX_USAGE();
1106
1107 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1108 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1109 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
1110 puDst, puSrc1, puSrc2, bImmArg);
1111 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1112
1113 IEM_MC_ADVANCE_RIP_AND_FINISH();
1114 IEM_MC_END();
1115 }
1116}
1117
1118
1119/* Opcode VEX.66.0F3A 0x47 - invalid */
1120/** Opcode VEX.66.0F3A 0x48 (AMD tables only). */
1121FNIEMOP_STUB(iemOp_vperlmilzz2ps_Vx_Hx_Wp_Lx);
1122/** Opcode VEX.66.0F3A 0x49 (AMD tables only). */
1123FNIEMOP_STUB(iemOp_vperlmilzz2pd_Vx_Hx_Wp_Lx);
1124
1125
1126/**
1127 * Common worker for AVX instructions on the forms:
1128 * - vblendvps/d xmm0, xmm1, xmm2/mem128, xmm4
1129 * - vblendvps/d ymm0, ymm1, ymm2/mem256, ymm4
1130 *
1131 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operations.
1132 * Additionally, it triggers \#UD if VEX.W is 1.
1133 */
1134FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
1135{
1136 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1137 if (IEM_IS_MODRM_REG_MODE(bRm))
1138 {
1139 /*
1140 * Register, register.
1141 */
1142 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1143 if (pVCpu->iem.s.uVexLength)
1144 {
1145 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1146 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1147 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1148 IEM_MC_PREPARE_AVX_USAGE();
1149 IEM_MC_LOCAL(RTUINT256U, uDst);
1150 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1151 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1152 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1153 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1154 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1155 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1156 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1157 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1158 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1159 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1160 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1161 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1162 IEM_MC_ADVANCE_RIP_AND_FINISH();
1163 IEM_MC_END();
1164 }
1165 else
1166 {
1167 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1168 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1169 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1170 IEM_MC_PREPARE_AVX_USAGE();
1171 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1172 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1173 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1174 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1175 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
1176 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1177 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1178 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1179 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1180 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1181 IEM_MC_ADVANCE_RIP_AND_FINISH();
1182 IEM_MC_END();
1183 }
1184 }
1185 else
1186 {
1187 /*
1188 * Register, memory.
1189 */
1190 if (pVCpu->iem.s.uVexLength)
1191 {
1192 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1195 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1196 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1197 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1198 IEM_MC_PREPARE_AVX_USAGE();
1199
1200 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1201 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1202 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1203
1204 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1205 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1206 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1207 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1208 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1209 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1210 IEM_MC_LOCAL(RTUINT256U, uDst);
1211 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1212 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1213 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1214
1215 IEM_MC_ADVANCE_RIP_AND_FINISH();
1216 IEM_MC_END();
1217 }
1218 else
1219 {
1220 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1223 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1224 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1225 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1226 IEM_MC_PREPARE_AVX_USAGE();
1227
1228 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1229 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1230 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1231
1232 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1233 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1234 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1235 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1236 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1237 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1238 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1239 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1240
1241 IEM_MC_ADVANCE_RIP_AND_FINISH();
1242 IEM_MC_END();
1243 }
1244 }
1245}
1246
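/*
 * Illustrative sketch (not part of the original file): for the variable blends
 * routed through the worker above, the fourth operand is a register encoded in
 * the high nibble of the trailing opcode byte (see IEM_GET_IMM8_REG), and the
 * sign bit of each of its elements picks the corresponding element of the
 * second source over the first.  Self-contained plain-C model at dword
 * granularity (vblendvps, one 128-bit lane); the name is hypothetical.
 */
#include <stdint.h>

static void ExampleBlendVarPs128(uint32_t aDst[4], const uint32_t aSrc1[4],
                                 const uint32_t aSrc2[4], const uint32_t aMask[4])
{
    for (unsigned i = 0; i < 4; i++)
        aDst[i] = (aMask[i] & UINT32_C(0x80000000)) ? aSrc2[i] : aSrc1[i];
}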
1247
1248/** Opcode VEX.66.0F3A 0x4a (vex only).
1249 * AVX, AVX */
1250FNIEMOP_DEF(iemOp_vblendvps_Vx_Hx_Wx_Lx)
1251{
1252 IEMOP_MNEMONIC4(VEX_RVMR, VBLENDVPS, vblendvps, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0);
1253 IEMOPBLENDOP_INIT_VARS(vblendvps);
1254 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
1255}
1256
1257
1258/** Opcode VEX.66.0F3A 0x4b (vex only).
1259 * AVX, AVX */
1260FNIEMOP_DEF(iemOp_vblendvpd_Vx_Hx_Wx_Lx)
1261{
1262 IEMOP_MNEMONIC4(VEX_RVMR, VBLENDVPD, vblendvpd, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0);
1263 IEMOPBLENDOP_INIT_VARS(vblendvpd);
1264 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
1265}
1266
1267
1268/**
1269 * Common worker for AVX2 instructions on the forms:
1270 * - vpxxx xmm0, xmm1, xmm2/mem128, xmm4
1271 * - vpxxx ymm0, ymm1, ymm2/mem256, ymm4
1272 *
1273 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
1274 * Additionally, VEX.W must be zero.
1275 */
1276FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
1277{
1278 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1279 if (IEM_IS_MODRM_REG_MODE(bRm))
1280 {
1281 /*
1282 * Register, register.
1283 */
1284 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1285 if (pVCpu->iem.s.uVexLength)
1286 {
1287 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1288 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx2);
1289 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1290 IEM_MC_PREPARE_AVX_USAGE();
1291
1292 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1293 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1294 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1295
1296 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1297 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1298 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1299
1300 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1301 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1302 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1303
1304 IEM_MC_LOCAL(RTUINT256U, uDst);
1305 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1306
1307 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1308
1309 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1310 IEM_MC_ADVANCE_RIP_AND_FINISH();
1311 IEM_MC_END();
1312 }
1313 else
1314 {
1315 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1316 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1317 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1318 IEM_MC_PREPARE_AVX_USAGE();
1319 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1320 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1321 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1322 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1323 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
1324 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1325 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1326 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1327 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1328 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1329 IEM_MC_ADVANCE_RIP_AND_FINISH();
1330 IEM_MC_END();
1331 }
1332 }
1333 else
1334 {
1335 /*
1336 * Register, memory.
1337 */
1338 if (pVCpu->iem.s.uVexLength)
1339 {
1340 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1342
1343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1344 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1345
1346 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx2);
1347 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1348 IEM_MC_PREPARE_AVX_USAGE();
1349
1350 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1351 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1352 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1353
1354 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1355 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1356 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1357
1358 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1359 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1360 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1361
1362 IEM_MC_LOCAL(RTUINT256U, uDst);
1363 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1364
1365 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1366
1367 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1368 IEM_MC_ADVANCE_RIP_AND_FINISH();
1369 IEM_MC_END();
1370 }
1371 else
1372 {
1373 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1376 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1377
1378 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1379 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1380 IEM_MC_PREPARE_AVX_USAGE();
1381
1382 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1383 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1384 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1385
1386 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1387 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1388 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1389 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1390 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1391 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1392 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1393 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1394
1395 IEM_MC_ADVANCE_RIP_AND_FINISH();
1396 IEM_MC_END();
1397 }
1398 }
1399}
1400
1401
1402/** Opcode VEX.66.0F3A 0x4c (vex only).
1403 * AVX, AVX2 */
1404FNIEMOP_DEF(iemOp_vpblendvb_Vx_Hx_Wx_Lx)
1405{
1406 /** @todo testcase: cover VEX.W=1 and check that it triggers \#UD on both real
1407 * and emulated hardware. */
1408 IEMOP_MNEMONIC4(VEX_RVMR, VPBLENDVB, vpblendvb, Vx_WO, Hx, Wx, Lx, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_W_ZERO);
1409 IEMOPBLENDOP_INIT_VARS(vpblendvb);
1410 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1411}
1412
1413
1414/* Opcode VEX.66.0F3A 0x4d - invalid */
1415/* Opcode VEX.66.0F3A 0x4e - invalid */
1416/* Opcode VEX.66.0F3A 0x4f - invalid */
1417
1418
1419/* Opcode VEX.66.0F3A 0x50 - invalid */
1420/* Opcode VEX.66.0F3A 0x51 - invalid */
1421/* Opcode VEX.66.0F3A 0x52 - invalid */
1422/* Opcode VEX.66.0F3A 0x53 - invalid */
1423/* Opcode VEX.66.0F3A 0x54 - invalid */
1424/* Opcode VEX.66.0F3A 0x55 - invalid */
1425/* Opcode VEX.66.0F3A 0x56 - invalid */
1426/* Opcode VEX.66.0F3A 0x57 - invalid */
1427/* Opcode VEX.66.0F3A 0x58 - invalid */
1428/* Opcode VEX.66.0F3A 0x59 - invalid */
1429/* Opcode VEX.66.0F3A 0x5a - invalid */
1430/* Opcode VEX.66.0F3A 0x5b - invalid */
1431/** Opcode VEX.66.0F3A 0x5c (AMD tables only). */
1432FNIEMOP_STUB(iemOp_vfmaddsubps_Vx_Lx_Wx_Hx);
1433/** Opcode VEX.66.0F3A 0x5d (AMD tables only). */
1434FNIEMOP_STUB(iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx);
1435/** Opcode VEX.66.0F3A 0x5e (AMD tables only). */
1436FNIEMOP_STUB(iemOp_vfmsubaddps_Vx_Lx_Wx_Hx);
1437/** Opcode VEX.66.0F3A 0x5f (AMD tables only). */
1438FNIEMOP_STUB(iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx);
1439
1440
1441/** Opcode VEX.66.0F3A 0x60. */
1442FNIEMOP_STUB(iemOp_vpcmpestrm_Vdq_Wdq_Ib);
1443/** Opcode VEX.66.0F3A 0x61, */
1444FNIEMOP_STUB(iemOp_vpcmpestri_Vdq_Wdq_Ib);
1445/** Opcode VEX.66.0F3A 0x62. */
1446FNIEMOP_STUB(iemOp_vpcmpistrm_Vdq_Wdq_Ib);
1447/** Opcode VEX.66.0F3A 0x63. */
1448FNIEMOP_STUB(iemOp_vpcmpistri_Vdq_Wdq_Ib);
1449/* Opcode VEX.66.0F3A 0x64 - invalid */
1450/* Opcode VEX.66.0F3A 0x65 - invalid */
1451/* Opcode VEX.66.0F3A 0x66 - invalid */
1452/* Opcode VEX.66.0F3A 0x67 - invalid */
1453/** Opcode VEX.66.0F3A 0x68 (AMD tables only). */
1454FNIEMOP_STUB(iemOp_vfmaddps_Vx_Lx_Wx_Hx);
1455/** Opcode VEX.66.0F3A 0x69 (AMD tables only). */
1456FNIEMOP_STUB(iemOp_vfmaddpd_Vx_Lx_Wx_Hx);
1457/** Opcode VEX.66.0F3A 0x6a (AMD tables only). */
1458FNIEMOP_STUB(iemOp_vfmaddss_Vx_Lx_Wx_Hx);
1459/** Opcode VEX.66.0F3A 0x6b (AMD tables only). */
1460FNIEMOP_STUB(iemOp_vfmaddsd_Vx_Lx_Wx_Hx);
1461/** Opcode VEX.66.0F3A 0x6c (AMD tables only). */
1462FNIEMOP_STUB(iemOp_vfmsubps_Vx_Lx_Wx_Hx);
1463/** Opcode VEX.66.0F3A 0x6d (AMD tables only). */
1464FNIEMOP_STUB(iemOp_vfmsubpd_Vx_Lx_Wx_Hx);
1465/** Opcode VEX.66.0F3A 0x6e (AMD tables only). */
1466FNIEMOP_STUB(iemOp_vfmsubss_Vx_Lx_Wx_Hx);
1467/** Opcode VEX.66.0F3A 0x6f (AMD tables only). */
1468FNIEMOP_STUB(iemOp_vfmsubsd_Vx_Lx_Wx_Hx);
1469
1470/* Opcode VEX.66.0F3A 0x70 - invalid */
1471/* Opcode VEX.66.0F3A 0x71 - invalid */
1472/* Opcode VEX.66.0F3A 0x72 - invalid */
1473/* Opcode VEX.66.0F3A 0x73 - invalid */
1474/* Opcode VEX.66.0F3A 0x74 - invalid */
1475/* Opcode VEX.66.0F3A 0x75 - invalid */
1476/* Opcode VEX.66.0F3A 0x76 - invalid */
1477/* Opcode VEX.66.0F3A 0x77 - invalid */
1478/** Opcode VEX.66.0F3A 0x78 (AMD tables only). */
1479FNIEMOP_STUB(iemOp_vfnmaddps_Vx_Lx_Wx_Hx);
1480/** Opcode VEX.66.0F3A 0x79 (AMD tables only). */
1481FNIEMOP_STUB(iemOp_vfnmaddpd_Vx_Lx_Wx_Hx);
1482/** Opcode VEX.66.0F3A 0x7a (AMD tables only). */
1483FNIEMOP_STUB(iemOp_vfnmaddss_Vx_Lx_Wx_Hx);
1484/** Opcode VEX.66.0F3A 0x7b (AMD tables only). */
1485FNIEMOP_STUB(iemOp_vfnmaddsd_Vx_Lx_Wx_Hx);
1486/** Opcode VEX.66.0F3A 0x7c (AMD tables only). */
1487FNIEMOP_STUB(iemOp_vfnmsubps_Vx_Lx_Wx_Hx);
1488/** Opcode VEX.66.0F3A 0x7d (AMD tables only). */
1489FNIEMOP_STUB(iemOp_vfnmsubpd_Vx_Lx_Wx_Hx);
1490/** Opcode VEX.66.0F3A 0x7e (AMD tables only). */
1491FNIEMOP_STUB(iemOp_vfnmsubss_Vx_Lx_Wx_Hx);
1492/** Opcode VEX.66.0F3A 0x7f (AMD tables only). */
1493FNIEMOP_STUB(iemOp_vfnmsubsd_Vx_Lx_Wx_Hx);
1494
1495/* Opcodes 0x0f 0x80 thru 0x0f 0xb0 are unused. */
1496
1497
1498/* Opcode 0x0f 0xc0 - invalid */
1499/* Opcode 0x0f 0xc1 - invalid */
1500/* Opcode 0x0f 0xc2 - invalid */
1501/* Opcode 0x0f 0xc3 - invalid */
1502/* Opcode 0x0f 0xc4 - invalid */
1503/* Opcode 0x0f 0xc5 - invalid */
1504/* Opcode 0x0f 0xc6 - invalid */
1505/* Opcode 0x0f 0xc7 - invalid */
1506/* Opcode 0x0f 0xc8 - invalid */
1507/* Opcode 0x0f 0xc9 - invalid */
1508/* Opcode 0x0f 0xca - invalid */
1509/* Opcode 0x0f 0xcb - invalid */
1510/* Opcode 0x0f 0xcc - invalid */
1511/* Opcode 0x0f 0xcd - invalid */
1512/* Opcode 0x0f 0xce - invalid */
1513/* Opcode 0x0f 0xcf - invalid */
1514
1515
1516/* Opcode VEX.66.0F3A 0xd0 - invalid */
1517/* Opcode VEX.66.0F3A 0xd1 - invalid */
1518/* Opcode VEX.66.0F3A 0xd2 - invalid */
1519/* Opcode VEX.66.0F3A 0xd3 - invalid */
1520/* Opcode VEX.66.0F3A 0xd4 - invalid */
1521/* Opcode VEX.66.0F3A 0xd5 - invalid */
1522/* Opcode VEX.66.0F3A 0xd6 - invalid */
1523/* Opcode VEX.66.0F3A 0xd7 - invalid */
1524/* Opcode VEX.66.0F3A 0xd8 - invalid */
1525/* Opcode VEX.66.0F3A 0xd9 - invalid */
1526/* Opcode VEX.66.0F3A 0xda - invalid */
1527/* Opcode VEX.66.0F3A 0xdb - invalid */
1528/* Opcode VEX.66.0F3A 0xdc - invalid */
1529/* Opcode VEX.66.0F3A 0xdd - invalid */
1530/* Opcode VEX.66.0F3A 0xde - invalid */
1531/* Opcode VEX.66.0F3A 0xdf - (aeskeygenassist). */
1532FNIEMOP_STUB(iemOp_vaeskeygen_Vdq_Wdq_Ib);
1533
1534
1535/**
1536 * @opcode 0xf0
1537 * @oppfx 0xf2
1538 * @opflclass unchanged
1539 */
1540FNIEMOP_DEF(iemOp_rorx_Gy_Ey_Ib)
1541{
1542 IEMOP_MNEMONIC3(VEX_RMI, RORX, rorx, Gy, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO);
1543 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1544 if (IEM_IS_MODRM_REG_MODE(bRm))
1545 {
1546 /*
1547 * Register, register.
1548 */
1549 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1550 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1551 {
1552 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
1553 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1554 IEM_MC_ARG(uint64_t *, pDst, 0);
1555 IEM_MC_ARG(uint64_t, uSrc1, 1);
1556 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
1557 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
1558 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1559 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
1560 IEM_MC_ADVANCE_RIP_AND_FINISH();
1561 IEM_MC_END();
1562 }
1563 else
1564 {
1565 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1566 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1567 IEM_MC_ARG(uint32_t *, pDst, 0);
1568 IEM_MC_ARG(uint32_t, uSrc1, 1);
1569 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
1570 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
1571 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1572 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
1573 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
1574 IEM_MC_ADVANCE_RIP_AND_FINISH();
1575 IEM_MC_END();
1576 }
1577 }
1578 else
1579 {
1580 /*
1581 * Register, memory.
1582 */
1583 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1584 {
1585 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
1586 IEM_MC_ARG(uint64_t *, pDst, 0);
1587 IEM_MC_ARG(uint64_t, uSrc1, 1);
1588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1589 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1590 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1591 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
1592 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1593 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1594 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1595 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
1596 IEM_MC_ADVANCE_RIP_AND_FINISH();
1597 IEM_MC_END();
1598 }
1599 else
1600 {
1601 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1602 IEM_MC_ARG(uint32_t *, pDst, 0);
1603 IEM_MC_ARG(uint32_t, uSrc1, 1);
1604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1606 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
1607 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
1608 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
1609 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1610 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1611 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
1612 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
1613 IEM_MC_ADVANCE_RIP_AND_FINISH();
1614 IEM_MC_END();
1615 }
1616 }
1617}
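For reference, the rorx helpers invoked above perform a plain rotate-right of the source by the immediate, with the count masked to the operand width and no flags touched. A minimal sketch of that operation, using hypothetical names and plain C signatures rather than the actual iemAImpl declarations:

/* Hypothetical reference sketch of the rorx operation; the real helpers
   live in the assembly/C implementation files and may differ in form. */
static void iemSketch_RorxU64(uint64_t *puDst, uint64_t uSrc, uint8_t cShift)
{
    cShift &= 63;                                               /* count is taken modulo 64 */
    *puDst = (uSrc >> cShift) | (uSrc << ((64 - cShift) & 63)); /* EFLAGS left untouched */
}

static void iemSketch_RorxU32(uint32_t *puDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;                                               /* count is taken modulo 32 */
    *puDst = (uSrc >> cShift) | (uSrc << ((32 - cShift) & 31));
}

The 32-bit form additionally zero-extends the result into the upper half of the 64-bit destination register, which is what the IEM_MC_CLEAR_HIGH_GREG_U64 calls in the decoder bodies above take care of.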
1618
1619
1620/**
1621 * VEX opcode map \#3.
1622 *
1623 * @sa g_apfnThreeByte0f3a
1624 */
1625const PFNIEMOP g_apfnVexMap3[] =
1626{
1627 /* no prefix, 066h prefix f3h prefix, f2h prefix */
1628 /* 0x00 */ iemOp_InvalidNeedRMImm8, iemOp_vpermq_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1629 /* 0x01 */ iemOp_InvalidNeedRMImm8, iemOp_vpermqd_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1630 /* 0x02 */ iemOp_InvalidNeedRMImm8, iemOp_vpblendd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1631 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1632 /* 0x04 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1633 /* 0x05 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1634 /* 0x06 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1635 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1636 /* 0x08 */ iemOp_InvalidNeedRMImm8, iemOp_vroundps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1637 /* 0x09 */ iemOp_InvalidNeedRMImm8, iemOp_vroundpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1638 /* 0x0a */ iemOp_InvalidNeedRMImm8, iemOp_vroundss_Vss_Wss_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1639 /* 0x0b */ iemOp_InvalidNeedRMImm8, iemOp_vroundsd_Vsd_Wsd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1640 /* 0x0c */ iemOp_InvalidNeedRMImm8, iemOp_vblendps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1641 /* 0x0d */ iemOp_InvalidNeedRMImm8, iemOp_vblendpd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1642 /* 0x0e */ iemOp_InvalidNeedRMImm8, iemOp_vpblendw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1643 /* 0x0f */ iemOp_InvalidNeedRMImm8, iemOp_vpalignr_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1644
1645 /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1646 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1647 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1648 /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1649 /* 0x14 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrb_Eb_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1650 /* 0x15 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrw_Ew_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1651 /* 0x16 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrd_q_Ey_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1652 /* 0x17 */ iemOp_InvalidNeedRMImm8, iemOp_vextractps_Ed_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1653 /* 0x18 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1654 /* 0x19 */ iemOp_InvalidNeedRMImm8, iemOp_vextractf128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1655 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1656 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1657 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1658 /* 0x1d */ iemOp_InvalidNeedRMImm8, iemOp_vcvtps2ph_Wx_Vx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1659 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1660 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1661
1662 /* 0x20 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1663 /* 0x21 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1664 /* 0x22 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1665 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1666 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1667 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1668 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1669 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1670 /* 0x28 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1671 /* 0x29 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1672 /* 0x2a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1673 /* 0x2b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1674 /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1675 /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1676 /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1677 /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1678
1679 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1680 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1681 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1682 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1683 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1684 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1685 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1686 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1687 /* 0x38 */ iemOp_InvalidNeedRMImm8, iemOp_vinserti128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1688 /* 0x39 */ iemOp_InvalidNeedRMImm8, iemOp_vextracti128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1689 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1690 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1691 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1692 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1693 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1694 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1695
1696 /* 0x40 */ iemOp_InvalidNeedRMImm8, iemOp_vdpps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1697 /* 0x41 */ iemOp_InvalidNeedRMImm8, iemOp_vdppd_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1698 /* 0x42 */ iemOp_InvalidNeedRMImm8, iemOp_vmpsadbw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1699 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1700 /* 0x44 */ iemOp_InvalidNeedRMImm8, iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1701 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1702 /* 0x46 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1703 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1704 /* 0x48 */ iemOp_InvalidNeedRMImm8, iemOp_vperlmilzz2ps_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1705 /* 0x49 */ iemOp_InvalidNeedRMImm8, iemOp_vperlmilzz2pd_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1706 /* 0x4a */ iemOp_InvalidNeedRMImm8, iemOp_vblendvps_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1707 /* 0x4b */ iemOp_InvalidNeedRMImm8, iemOp_vblendvpd_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1708 /* 0x4c */ iemOp_InvalidNeedRMImm8, iemOp_vpblendvb_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1709 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1710 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1711 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1712
1713 /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1714 /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1715 /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1716 /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1717 /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1718 /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1719 /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1720 /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1721 /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1722 /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1723 /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1724 /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1725 /* 0x5c */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1726 /* 0x5d */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1727 /* 0x5e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1728 /* 0x5f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1729
1730 /* 0x60 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1731 /* 0x61 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1732 /* 0x62 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1733 /* 0x63 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1734 /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1735 /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1736 /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1737 /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1738 /* 0x68 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1739 /* 0x69 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1740 /* 0x6a */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1741 /* 0x6b */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1742 /* 0x6c */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1743 /* 0x6d */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1744 /* 0x6e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1745 /* 0x6f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1746
1747 /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1748 /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1749 /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1750 /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1751 /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1752 /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1753 /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1754 /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1755 /* 0x78 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1756 /* 0x79 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1757 /* 0x7a */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1758 /* 0x7b */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1759 /* 0x7c */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1760 /* 0x7d */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1761 /* 0x7e */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1762 /* 0x7f */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1763
1764 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1765 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1766 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1767 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1768 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1769 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1770 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1771 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1772 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1773 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1774 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1775 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1776 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1777 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1778 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1779 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1780
1781 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1782 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1783 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1784 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1785 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1786 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1787 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1788 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1789 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1790 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1791 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1792 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1793 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1794 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1795 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1796 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1797
1798 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1799 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1800 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1801 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1802 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1803 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1804 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1805 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1806 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1807 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1808 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1809 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1810 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1811 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1812 /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1813 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1814
1815 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1816 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1817 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1818 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1819 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1820 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1821 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1822 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1823 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1824 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1825 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1826 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1827 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1828 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1829 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1830 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1831
1832 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1833 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1834 /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1835 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1836 /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1837 /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1838 /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1839 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1840 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1841 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1842 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1843 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1844 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1845 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1846 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1847 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1848
1849 /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1850 /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1851 /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1852 /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1853 /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1854 /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1855 /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1856 /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1857 /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1858 /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1859 /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1860 /* 0xdb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1861 /* 0xdc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1862 /* 0xdd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1863 /* 0xde */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1864 /* 0xdf */ iemOp_InvalidNeedRMImm8, iemOp_vaeskeygen_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
1865
1866 /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1867 /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1868 /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1869 /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1870 /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1871 /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1872 /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1873 /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1874 /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1875 /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1876 /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1877 /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1878 /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1879 /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1880 /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1881 /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1882
1883 /* 0xf0 */ iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_rorx_Gy_Ey_Ib,
1884 /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1885 /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1886 /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1887 /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1888 /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1889 /* 0xf6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1890 /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1891 /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1892 /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1893 /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1894 /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1895 /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1896 /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1897 /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1898 /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
1899};
1900AssertCompile(RT_ELEMENTS(g_apfnVexMap3) == 1024);
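Each opcode byte occupies four consecutive slots, one per mandatory-prefix column (none, 0x66, 0xF3, 0xF2), which is why the AssertCompile above checks for 256 * 4 = 1024 entries. A hypothetical lookup into the table, assuming the opcode byte and a prefix index idxPrefix (0..3) have already been decoded; this is a sketch only, not the actual VEX dispatcher:

/* Hypothetical dispatch sketch: idxPrefix is 0 = no prefix, 1 = 0x66, 2 = 0xF3,
   3 = 0xF2, matching the four columns per opcode byte in g_apfnVexMap3. */
uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + idxPrefix]);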
1901
1902/** @} */
1903