VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap3.cpp.h @ 105414

Last change on this file since 105414 was 105359, checked in by vboxsync, 7 months ago:
VMM/IEM: Implement vdpps and vdppd instruction emulation (fallback missing), bugref:9898
/* $Id: IEMAllInstVexMap3.cpp.h 105359 2024-07-16 14:14:06Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation, 0x0f 0x3a map.
 *
 * @remarks IEMAllInstThree0f3a.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 3
 * @{
 */

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128, imm8
 *     - vpxxx    ymm0, ymm1, ymm2/mem256, imm8
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * Common worker for AVX instructions on the forms:
 *     - vxxxp{s,d}    xmm0, xmm1/mem128, imm8
 *     - vxxxp{s,d}    ymm0, ymm1/mem256, imm8
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Wx_Ib, PCIEMOPMEDIAF2IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86YMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_ARG(PCX86XMMREG, puSrc, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86YMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86XMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * Common worker for AVX instructions on the forms:
 *     - vpermilps/d    xmm0, xmm1/mem128, imm8
 *     - vpermilps/d    ymm0, ymm1/mem256, imm8
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF2IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * Common worker for AVX instructions on the forms:
 *     - vblendps/d    xmm0, xmm1, xmm2/mem128, imm8
 *     - vblendps/d    ymm0, ymm1, ymm2/mem256, imm8
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode VEX.66.0F3A 0x00. */
FNIEMOP_DEF(iemOp_vpermq_Vqq_Wqq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermq_u256, iemAImpl_vpermq_u256_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermq_u256, iemAImpl_vpermq_u256_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
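

/*
 * Illustrative sketch (hypothetical helper, not part of the VBox sources): the
 * qword selection vpermq performs.  Each destination qword is picked by two
 * immediate bits, i.e. dst[i] = src[(imm8 >> (2*i)) & 3].
 */
DECLINLINE(void) iemVpermqRefModel(uint64_t aDst[4], uint64_t const aSrc[4], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)
        aDst[i] = aSrc[(bImm >> (i * 2)) & 3];  /* two selector bits per destination qword */
}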


/** Opcode VEX.66.0F3A 0x01. */
FNIEMOP_DEF(iemOp_vpermpd_Vqq_Wqq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermpd_u256, iemAImpl_vpermpd_u256_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermpd_u256, iemAImpl_vpermpd_u256_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x02.
 * AVX2,AVX2 */
FNIEMOP_DEF(iemOp_vpblendd_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPBLENDD, vpblendd, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
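

/*
 * Illustrative sketch (hypothetical helper, not part of the VBox sources):
 * vpblendd takes each dword from the second source when the matching imm8 bit
 * is set, otherwise from the first; cElems is 4 for the xmm form and 8 for ymm.
 */
DECLINLINE(void) iemVpblenddRefModel(uint32_t *pauDst, uint32_t const *pauSrc1,
                                     uint32_t const *pauSrc2, uint8_t bImm, unsigned cElems)
{
    for (unsigned i = 0; i < cElems; i++)
        pauDst[i] = bImm & (1U << i) ? pauSrc2[i] : pauSrc1[i];  /* one mask bit per dword */
}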


/* Opcode VEX.66.0F3A 0x03 - invalid */


/** Opcode VEX.66.0F3A 0x04.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vpermilps_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VPERMILPS, vpermilps, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
    IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}
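

/*
 * Illustrative sketch (hypothetical helper, not part of the VBox sources): the
 * immediate form of vpermilps shuffles dwords within each 128-bit lane
 * independently, reusing the same two selector bits for every lane; vpermilpd
 * works alike with one imm8 bit per qword of a lane.  Assumes the buffers do
 * not overlap.
 */
DECLINLINE(void) iemVpermilpsImmRefModel(uint32_t *pauDst, uint32_t const *pauSrc,
                                         uint8_t bImm, unsigned cLanes)
{
    for (unsigned iLane = 0; iLane < cLanes; iLane++)  /* 1 lane for xmm, 2 for ymm */
        for (unsigned i = 0; i < 4; i++)
            pauDst[iLane * 4 + i] = pauSrc[iLane * 4 + ((bImm >> (i * 2)) & 3)];
}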


/** Opcode VEX.66.0F3A 0x05.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vpermilpd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VPERMILPD, vpermilpd, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
    IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x06 (vex only) */
FNIEMOP_DEF(iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPERM2F128, vperm2f128, Vqq_WO, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
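

/*
 * Illustrative sketch (hypothetical helper, not part of the VBox sources):
 * vperm2f128 builds each 128-bit half of the result from one of the four
 * source halves.  Imm8 bits 1:0 control the low half and bits 5:4 the high
 * half (bit 1/5 picks the source, bit 0/4 the half); bits 3 and 7 zero the
 * respective half.  Assumes puDst does not alias the sources.
 */
DECLINLINE(void) iemVperm2f128RefModel(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bImm)
{
    for (unsigned iHalf = 0; iHalf < 2; iHalf++)
    {
        uint8_t const bSel = bImm >> (iHalf * 4);
        if (bSel & 8)                                   /* zeroing bit */
            puDst->au128[iHalf].au64[0] = puDst->au128[iHalf].au64[1] = 0;
        else
            puDst->au128[iHalf] = (bSel & 2 ? puSrc2 : puSrc1)->au128[bSel & 1];
    }
}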


/* Opcode VEX.66.0F3A 0x07 - invalid */


/** Opcode VEX.66.0F3A 0x08. */
FNIEMOP_DEF(iemOp_vroundps_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VROUNDPS, vroundps, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
    IEMOPMEDIAF2IMM8_INIT_VARS(vroundps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x09. */
FNIEMOP_DEF(iemOp_vroundpd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VROUNDPD, vroundpd, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
    IEMOPMEDIAF2IMM8_INIT_VARS(vroundpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x0a. */
FNIEMOP_DEF(iemOp_vroundss_Vss_Wss_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VROUNDSS, vroundss, Vps, Hps, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vroundss_u128, iemAImpl_vroundss_u128_fallback),
                                puDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM32, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_EFFECTIVE_VVVV(pVCpu),
                                              0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vroundss_u128, iemAImpl_vroundss_u128_fallback),
                                puDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
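

/*
 * Illustrative note (hypothetical helper, not part of the VBox sources): how
 * the round-family imm8 selects the effective rounding mode.  Bit 2 set means
 * "use MXCSR.RC", otherwise bits 1:0 give the mode (0 nearest, 1 down, 2 up,
 * 3 toward zero); bit 3 suppresses precision exceptions.
 */
DECLINLINE(uint8_t) iemVroundEffectiveRc(uint8_t bImm, uint8_t bMxcsrRc)
{
    return bImm & 4 ? bMxcsrRc : bImm & 3;
}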


/** Opcode VEX.66.0F3A 0x0b. */
FNIEMOP_DEF(iemOp_vroundsd_Vsd_Wsd_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VROUNDSD, vroundsd, Vpd, Hpd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vroundsd_u128, iemAImpl_vroundsd_u128_fallback),
                                puDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM64, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_EFFECTIVE_VVVV(pVCpu),
                                              0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vroundsd_u128, iemAImpl_vroundsd_u128_fallback),
                                puDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x0c.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vblendps_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VBLENDPS, vblendps, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x0d.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vblendpd_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VBLENDPD, vblendpd, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x0e.
 * AVX,AVX2 */
FNIEMOP_DEF(iemOp_vpblendw_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPBLENDW, vpblendw, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.0F3A 0x0f - invalid. */


/** Opcode VEX.66.0F3A 0x0f.
 * AVX,AVX2 */
FNIEMOP_DEF(iemOp_vpalignr_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPALIGNR, vpalignr, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpalignr);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.66.0F3A 0x10 - invalid */
/* Opcode VEX.66.0F3A 0x11 - invalid */
/* Opcode VEX.66.0F3A 0x12 - invalid */
/* Opcode VEX.66.0F3A 0x13 - invalid */


/** Opcode VEX.66.0F3A 0x14 - vpextrb Eb, Vdq, Ib */
FNIEMOP_DEF(iemOp_vpextrb_Eb_Vdq_Ib)
{
    IEMOP_MNEMONIC3(VEX_MRI, VPEXTRB, vpextrb, Eb, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(uint8_t, uValue);
        IEM_MC_FETCH_XREG_U8(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem8], XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(uint8_t, uValue);
        IEM_MC_FETCH_XREG_U8(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
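

/*
 * Illustrative sketch (hypothetical helper, not part of the VBox sources): the
 * extraction done by vpextrb.  The selector wraps via "bImm & 15"; the register
 * form zero-extends the byte into the full 32-bit general register, while the
 * memory form stores just the byte.
 */
DECLINLINE(uint32_t) iemVpextrbRefModel(PCRTUINT128U puSrc, uint8_t bImm)
{
    return puSrc->au8[bImm & 15];  /* implicitly zero extended to 32 bits */
}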


/** Opcode VEX.66.0F3A 0x15 - vpextrw Ew, Vdq, Ib */
FNIEMOP_DEF(iemOp_vpextrw_Ew_Vdq_Ib)
{
    /** @todo testcase: check that this ignores VEX.W. */
    IEMOP_MNEMONIC3(VEX_MRI, VPEXTRW, vpextrw, Ew_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem16], XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x16 - vpextrd / vpextrq Eq / Ey, Vdq, Ib */
FNIEMOP_DEF(iemOp_vpextrd_q_Ey_Vdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        IEMOP_MNEMONIC3(VEX_MRI, VPEXTRQ, vpextrq, Eq_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_W_ONE);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * greg64, XMM, imm8.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint64_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * [mem64], XMM, imm8.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint64_t, uValue);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         */
        IEMOP_MNEMONIC3(VEX_MRI, VPEXTRD, vpextrd, Ey_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_W_ZERO);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * greg32, XMM, imm8.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U32(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * [mem32], XMM, imm8.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uValue);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U32(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode VEX.66.0F3A 0x17. */
FNIEMOP_DEF(iemOp_vextractps_Ed_Vdq_Ib)
{
    //IEMOP_MNEMONIC3(VEX_MRI_REG, VEXTRACTPS, vextractps, Ed, Vdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem32], XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x18 (vex only). */
FNIEMOP_DEF(iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib)
{
    //IEMOP_MNEMONIC4(VEX_RMI, VINSERTF128, vinsertf128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_STORE_YREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_STORE_YREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
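

/*
 * Illustrative sketch (hypothetical helper, not part of the VBox sources):
 * vinsertf128 copies the ymm source and then replaces the 128-bit half
 * selected by imm8 bit 0, which is the same effect as the
 * IEM_MC_COPY_YREG_U256_ZX_VLMAX plus IEM_MC_STORE_YREG_U128 pair above.
 */
DECLINLINE(void) iemVinsertf128RefModel(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT128U puSrc2, uint8_t bImm)
{
    *puDst = *puSrc1;                   /* start from the first (ymm) source */
    puDst->au128[bImm & 1] = *puSrc2;   /* overwrite the selected half */
}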


/** Opcode VEX.66.0F3A 0x19 (vex only). */
FNIEMOP_DEF(iemOp_vextractf128_Wdq_Vqq_Ib)
{
    IEMOP_MNEMONIC3(VEX_MRI, VEXTRACTF128, vextractf128, Wdq, Vqq, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.66.0F3A 0x1a - invalid */
/* Opcode VEX.66.0F3A 0x1b - invalid */
/* Opcode VEX.66.0F3A 0x1c - invalid */
/** Opcode VEX.66.0F3A 0x1d (vex only). */
FNIEMOP_STUB(iemOp_vcvtps2ph_Wx_Vx_Ib);
/* Opcode VEX.66.0F3A 0x1e - invalid */
/* Opcode VEX.66.0F3A 0x1f - invalid */


/** Opcode VEX.66.0F3A 0x20. */
FNIEMOP_DEF(iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib)
{
    /*IEMOP_MNEMONIC4(VEX_RMVI, VPINSRB, vpinsrb, Vdq, Hdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint8_t, uValue);

        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_GREG_U8(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint8_t, uValue);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_MEM_U8(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x21, */
FNIEMOP_DEF(iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib)
{
    //IEMOP_MNEMONIC4(VEX_RVMR_REG, VINSERTPS, vinsertps, Vdq, Hdq, UdqMd, Ib, DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_ZERO); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint32_t, uSrc2);

        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_XREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), (bImm >> 6) & 3);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), (bImm >> 4) & 3, uSrc2);
        IEM_MC_CLEAR_XREG_U32_MASK(IEM_GET_MODRM_REG(pVCpu, bRm), bImm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, XMM, [mem32], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint32_t, uSrc2);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), (bImm >> 4) & 3, uSrc2);
        IEM_MC_CLEAR_XREG_U32_MASK(IEM_GET_MODRM_REG(pVCpu, bRm), bImm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
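

/*
 * Illustrative sketch (hypothetical helper, not part of the VBox sources): the
 * insertps imm8 fields used above.  Bits 7:6 select the source dword (ignored
 * by the memory form), bits 5:4 the destination dword, and the low four bits
 * zero individual result dwords.
 */
DECLINLINE(void) iemVinsertpsRefModel(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bImm)
{
    *puDst = *puSrc1;
    puDst->au32[(bImm >> 4) & 3] = puSrc2->au32[(bImm >> 6) & 3];
    for (unsigned i = 0; i < 4; i++)
        if (bImm & (1U << i))
            puDst->au32[i] = 0;         /* zmask */
}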


/** Opcode VEX.66.0F3A 0x22. */
FNIEMOP_DEF(iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /*IEMOP_MNEMONIC4(VEX_RMVI, VPINSRQ, vpinsrq, Vdq, Hdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register, register.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint64_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U64(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * Register, memory.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint64_t, uValue);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_MEM_U64(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*IEMOP_MNEMONIC4(VEX_RMVI, VPINSRD, vpinsrd, Vdq, Hdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register, register.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint32_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U32(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * Register, memory.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint32_t, uValue);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_MEM_U32(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
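

/*
 * Illustrative sketch (hypothetical helper, not part of the VBox sources): the
 * vpinsrq form above copies the first source xmm and then overwrites the qword
 * selected by imm8 bit 0 (vpinsrd does the same per dword with "bImm & 3").
 */
DECLINLINE(void) iemVpinsrqRefModel(PRTUINT128U puDst, PCRTUINT128U puSrc1, uint64_t uValue, uint8_t bImm)
{
    *puDst = *puSrc1;
    puDst->au64[bImm & 1] = uValue;
}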


/* Opcode VEX.66.0F3A 0x23 - invalid */
/* Opcode VEX.66.0F3A 0x24 - invalid */
/* Opcode VEX.66.0F3A 0x25 - invalid */
/* Opcode VEX.66.0F3A 0x26 - invalid */
/* Opcode VEX.66.0F3A 0x27 - invalid */
/* Opcode VEX.66.0F3A 0x28 - invalid */
/* Opcode VEX.66.0F3A 0x29 - invalid */
/* Opcode VEX.66.0F3A 0x2a - invalid */
/* Opcode VEX.66.0F3A 0x2b - invalid */
/* Opcode VEX.66.0F3A 0x2c - invalid */
/* Opcode VEX.66.0F3A 0x2d - invalid */
/* Opcode VEX.66.0F3A 0x2e - invalid */
/* Opcode VEX.66.0F3A 0x2f - invalid */


/* Opcode VEX.66.0F3A 0x30 - invalid */
/* Opcode VEX.66.0F3A 0x31 - invalid */
/* Opcode VEX.66.0F3A 0x32 - invalid */
/* Opcode VEX.66.0F3A 0x33 - invalid */
/* Opcode VEX.66.0F3A 0x34 - invalid */
/* Opcode VEX.66.0F3A 0x35 - invalid */
/* Opcode VEX.66.0F3A 0x36 - invalid */
/* Opcode VEX.66.0F3A 0x37 - invalid */


/** Opcode VEX.66.0F3A 0x38 (vex only). */
FNIEMOP_DEF(iemOp_vinserti128_Vqq_Hqq_Wqq_Ib)
{
    //IEMOP_MNEMONIC4(VEX_RMI, VINSERTI128, vinserti128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_STORE_YREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_STORE_YREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x39 (vex only). */
FNIEMOP_DEF(iemOp_vextracti128_Wdq_Vqq_Ib)
{
    IEMOP_MNEMONIC3(VEX_MRI, VEXTRACTI128, vextracti128, Wdq, Vqq, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.66.0F3A 0x3a - invalid */
/* Opcode VEX.66.0F3A 0x3b - invalid */
/* Opcode VEX.66.0F3A 0x3c - invalid */
/* Opcode VEX.66.0F3A 0x3d - invalid */
/* Opcode VEX.66.0F3A 0x3e - invalid */
/* Opcode VEX.66.0F3A 0x3f - invalid */


1555/** Opcode VEX.66.0F3A 0x40. */
1556FNIEMOP_DEF(iemOp_vdpps_Vx_Hx_Wx_Ib)
1557{
1558 IEMOP_MNEMONIC4(VEX_RVMI, VDPPS, vdpps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1559 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1560 if (IEM_IS_MODRM_REG_MODE(bRm))
1561 {
1562 /*
1563 * Register, Register
1564 */
1565 if (pVCpu->iem.s.uVexLength)
1566 {
1567 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1568 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1569 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1570 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1571 IEM_MC_PREPARE_AVX_USAGE();
1572 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc);
1573 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1);
1574 IEM_MC_FETCH_YREG_PAIR_YMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
1575 IEM_MC_LOCAL(X86YMMREG, uDst);
1576 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
1577 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
1578 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdpps_u256, iemAImpl_vdpps_u256_fallback),
1579 puDst, puSrc, bImmArg);
1580 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1581 IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1582 IEM_MC_ADVANCE_RIP_AND_FINISH();
1583 IEM_MC_END();
1584 }
1585 else
1586 {
1587 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1588 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1589 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1590 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1591 IEM_MC_PREPARE_AVX_USAGE();
1592 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
1593 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
1594 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
1595 IEM_MC_LOCAL(X86XMMREG, uDst);
1596 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
1597 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
1598 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdpps_u128, iemAImpl_vdpps_u128_fallback),
1599 puDst, puSrc, bImmArg);
1600 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1601 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1602 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1603 IEM_MC_ADVANCE_RIP_AND_FINISH();
1604 IEM_MC_END();
1605 }
1606 }
1607 else
1608 {
1609 /*
1610 * Register, Memory.
1611 */
1612 if (pVCpu->iem.s.uVexLength)
1613 {
1614 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1615 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1617 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1618 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1619 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1620 IEM_MC_PREPARE_AVX_USAGE();
1621 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc);
1622 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1);
1623 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
1624 IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu),
1625 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1626 IEM_MC_LOCAL(X86YMMREG, uDst);
1627 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
1628 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdpps_u256, iemAImpl_vdpps_u256_fallback),
1629 puDst, puSrc, bImmArg);
1630 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1631 IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1632 IEM_MC_ADVANCE_RIP_AND_FINISH();
1633 IEM_MC_END();
1634 }
1635 else
1636 {
1637 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1640 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1641 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1642 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1643 IEM_MC_PREPARE_AVX_USAGE();
1644 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
1645 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
1646 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
1647 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu),
1648 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1649 IEM_MC_LOCAL(X86XMMREG, uDst);
1650 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
1651 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdpps_u128, iemAImpl_vdpps_u128_fallback),
1652 puDst, puSrc, bImmArg);
1653 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1654 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1655 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1656 IEM_MC_ADVANCE_RIP_AND_FINISH();
1657 IEM_MC_END();
1658 }
1659 }
1660}
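
/*
 * Note: vdpps treats the imm8 as two 4-bit masks: bits 4..7 select which
 * packed singles enter the dot product, bits 0..3 select which result
 * elements receive the sum (the rest are set to +0.0).  The 256-bit form
 * applies the same masks to each 128-bit lane independently.  One-lane
 * sketch, illustrative only (RefDpps is a made-up name; it ignores MXCSR
 * rounding and FP exceptions, which the real instruction honours):
 *
 *   static void RefDpps(float afDst[4], float const afSrc1[4], float const afSrc2[4], uint8_t bImm)
 *   {
 *       float fSum = 0.0f;
 *       for (unsigned i = 0; i < 4; i++)
 *           if (bImm & RT_BIT_32(i + 4))
 *               fSum += afSrc1[i] * afSrc2[i];
 *       for (unsigned i = 0; i < 4; i++)
 *           afDst[i] = (bImm & RT_BIT_32(i)) ? fSum : 0.0f;
 *   }
 */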
1661
1662
1663/** Opcode VEX.66.0F3A 0x41, */
1664FNIEMOP_DEF(iemOp_vdppd_Vdq_Hdq_Wdq_Ib)
1665{
1666 IEMOP_MNEMONIC4(VEX_RVMI, VDPPD, vdppd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_ZERO);
1667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1668 if (IEM_IS_MODRM_REG_MODE(bRm))
1669 {
1670 /*
1671 * Register, Register
1672 */
1673 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1674 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1675 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1676 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1677 IEM_MC_PREPARE_AVX_USAGE();
1678 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
1679 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
1680 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
1681 IEM_MC_LOCAL(X86XMMREG, uDst);
1682 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
1683 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
1684 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdppd_u128, iemAImpl_vdppd_u128_fallback),
1685 puDst, puSrc, bImmArg);
1686 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1687 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1688 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1689 IEM_MC_ADVANCE_RIP_AND_FINISH();
1690 IEM_MC_END();
1691 }
1692 else
1693 {
1694 /*
1695 * Register, Memory.
1696 */
1697 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1700 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1701 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1702 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1703 IEM_MC_PREPARE_AVX_USAGE();
1704 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
1705 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
1706 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
1707 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu),
1708 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1709 IEM_MC_LOCAL(X86XMMREG, uDst);
1710 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
1711 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdppd_u128, iemAImpl_vdppd_u128_fallback),
1712 puDst, puSrc, bImmArg);
1713 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
1714 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1715 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1716 IEM_MC_ADVANCE_RIP_AND_FINISH();
1717 IEM_MC_END();
1718 }
1719}
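
/*
 * Note: vdppd is the two-double variant of the above: imm8 bits 4..5 gate the
 * products, bits 0..1 gate the result elements, and only the 128-bit form
 * exists (the IEMOPHINT_VEX_L_ZERO hint above).  Sketch, illustrative only
 * (RefDppd is a made-up name, ignoring MXCSR rounding and FP exceptions):
 *
 *   static void RefDppd(double afDst[2], double const afSrc1[2], double const afSrc2[2], uint8_t bImm)
 *   {
 *       double fSum = ((bImm & RT_BIT_32(4)) ? afSrc1[0] * afSrc2[0] : 0.0)
 *                   + ((bImm & RT_BIT_32(5)) ? afSrc1[1] * afSrc2[1] : 0.0);
 *       afDst[0] = (bImm & RT_BIT_32(0)) ? fSum : 0.0;
 *       afDst[1] = (bImm & RT_BIT_32(1)) ? fSum : 0.0;
 *   }
 */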
1720
1721
1722/** Opcode VEX.66.0F3A 0x42. */
1723FNIEMOP_DEF(iemOp_vmpsadbw_Vx_Hx_Wx_Ib)
1724{
1725 IEMOP_MNEMONIC4(VEX_RVMI, VMPSADBW, vmpsadbw, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
1726 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vmpsadbw);
1727    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1728}
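
/*
 * Note: vmpsadbw produces eight 16-bit sums of absolute byte differences.
 * Imm8 bit 2 picks a 4-byte offset (0 or 4) into the first source, bits 0..1
 * pick the 4-byte group (0, 4, 8 or 12) of the second source, and the window
 * over the first source slides one byte per result; for the 256-bit form,
 * imm8 bits 3..5 control the upper lane the same way.  One-lane sketch,
 * illustrative only (RefMpsadbw is a made-up name):
 *
 *   static void RefMpsadbw(uint16_t au16Dst[8], uint8_t const au8Src1[16], uint8_t const au8Src2[16], uint8_t bImm)
 *   {
 *       unsigned const offSrc1 = (bImm & 4) ? 4 : 0;
 *       unsigned const offSrc2 = (bImm & 3) * 4;
 *       for (unsigned i = 0; i < 8; i++)
 *       {
 *           uint16_t u16Sum = 0;
 *           for (unsigned j = 0; j < 4; j++)
 *           {
 *               int const iDiff = (int)au8Src1[offSrc1 + i + j] - (int)au8Src2[offSrc2 + j];
 *               u16Sum += (uint16_t)(iDiff < 0 ? -iDiff : iDiff);
 *           }
 *           au16Dst[i] = u16Sum;
 *       }
 *   }
 */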
1729
1730
1731/* Opcode VEX.66.0F3A 0x43 - invalid */
1732
1733
1734/** Opcode VEX.66.0F3A 0x44. */
1735FNIEMOP_DEF(iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib)
1736{
1737    IEMOP_MNEMONIC4(VEX_RVMI, VPCLMULQDQ, vpclmulqdq, Vdq_WO, Hdq, Wdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1738 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1739 if (IEM_IS_MODRM_REG_MODE(bRm))
1740 {
1741 /*
1742 * Register, register.
1743 */
1744 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1745 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1746 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
1747 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1748 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1749 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
1750 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1751 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1752 IEM_MC_PREPARE_AVX_USAGE();
1753 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1754 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1755 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1756 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
1757 puDst, puSrc1, puSrc2, bImmArg);
1758 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1759 IEM_MC_ADVANCE_RIP_AND_FINISH();
1760 IEM_MC_END();
1761 }
1762 else
1763 {
1764 /*
1765 * Register, memory.
1766 */
1767 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1768 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1769 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1770 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1771 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1772 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1773
1774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1775 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1776 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1777 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
1778 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1779 IEM_MC_PREPARE_AVX_USAGE();
1780
1781 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1782 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1783 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1784 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
1785 puDst, puSrc1, puSrc2, bImmArg);
1786 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1787
1788 IEM_MC_ADVANCE_RIP_AND_FINISH();
1789 IEM_MC_END();
1790 }
1791}
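
/*
 * Note: vpclmulqdq picks one quadword from each source (imm8 bit 0 for the
 * first, bit 4 for the second) and multiplies them carry-lessly, i.e. the
 * partial products are combined with XOR instead of ADD, yielding a 128-bit
 * result.  Shift-and-xor sketch, illustrative only (RefPclmulQdq is a made-up
 * name, not the IEM fallback):
 *
 *   static void RefPclmulQdq(RTUINT128U *puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bImm)
 *   {
 *       uint64_t const uA = puSrc1->au64[bImm & 1];
 *       uint64_t const uB = puSrc2->au64[(bImm >> 4) & 1];
 *       puDst->au64[0] = puDst->au64[1] = 0;
 *       for (unsigned i = 0; i < 64; i++)
 *           if (uB & RT_BIT_64(i))
 *           {
 *               puDst->au64[0] ^= uA << i;
 *               if (i)
 *                   puDst->au64[1] ^= uA >> (64 - i);
 *           }
 *   }
 */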
1792
1793
1794/* Opcode VEX.66.0F3A 0x45 - invalid */
1795
1796
1797/** Opcode VEX.66.0F3A 0x46 (vex only) */
1798FNIEMOP_DEF(iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib)
1799{
1800 IEMOP_MNEMONIC4(VEX_RVMI, VPERM2I128, vperm2i128, Vqq_WO, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ONE);
1801 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1802 if (IEM_IS_MODRM_REG_MODE(bRm))
1803 {
1804 /*
1805 * Register, register.
1806 */
1807 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1808 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1809 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
1810 IEM_MC_LOCAL(RTUINT256U, uDst);
1811 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1812 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1813 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1814 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1815 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1816 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
1817 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1818 IEM_MC_PREPARE_AVX_USAGE();
1819 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1820 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1821 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
1822 puDst, puSrc1, puSrc2, bImmArg);
1823 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1824 IEM_MC_ADVANCE_RIP_AND_FINISH();
1825 IEM_MC_END();
1826 }
1827 else
1828 {
1829 /*
1830 * Register, memory.
1831 */
1832 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1833 IEM_MC_LOCAL(RTUINT256U, uDst);
1834 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1835 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1837 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1838 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1839 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1840
1841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1842 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1843 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
1844 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
1845 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1846 IEM_MC_PREPARE_AVX_USAGE();
1847
1848 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1849 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1850 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
1851 puDst, puSrc1, puSrc2, bImmArg);
1852 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1853
1854 IEM_MC_ADVANCE_RIP_AND_FINISH();
1855 IEM_MC_END();
1856 }
1857}
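
/*
 * Note: vperm2i128 assembles each 128-bit half of the result from any half of
 * either source: imm8 bits 0..1 pick the low half (0/1 = first source
 * low/high, 2/3 = second source low/high) and bit 3 zeroes it instead; bits
 * 4..5 and bit 7 do the same for the high half.  Hedged sketch, illustrative
 * only (RefVPerm2I128 is a made-up name; assumes puDst does not alias the
 * sources):
 *
 *   static void RefVPerm2I128(RTUINT256U *puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bImm)
 *   {
 *       for (unsigned iHalf = 0; iHalf < 2; iHalf++)
 *       {
 *           uint8_t const  bSel  = (uint8_t)(bImm >> (iHalf * 4));
 *           PCRTUINT256U   pSrc  = (bSel & 2) ? puSrc2 : puSrc1;
 *           unsigned const iLane = (bSel & 1) * 2;
 *           bool const     fZero = RT_BOOL(bSel & 8);
 *           puDst->au64[iHalf * 2]     = fZero ? 0 : pSrc->au64[iLane];
 *           puDst->au64[iHalf * 2 + 1] = fZero ? 0 : pSrc->au64[iLane + 1];
 *       }
 *   }
 */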
1858
1859
1860/* Opcode VEX.66.0F3A 0x47 - invalid */
1861/** Opcode VEX.66.0F3A 0x48 (AMD tables only). */
1862FNIEMOP_STUB(iemOp_vperlmilzz2ps_Vx_Hx_Wp_Lx);
1863/** Opcode VEX.66.0F3A 0x49 (AMD tables only). */
1864FNIEMOP_STUB(iemOp_vperlmilzz2pd_Vx_Hx_Wp_Lx);
1865
1866
1867/**
1868 * Common worker for AVX instructions on the forms:
1869 * - vblendvps/d xmm0, xmm1, xmm2/mem128, xmm4
1870 * - vblendvps/d ymm0, ymm1, ymm2/mem256, ymm4
1871 *
1872 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operations.
1873 * Additionally, it triggers \#UD if VEX.W is 1.
1874 */
1875FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
1876{
1877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1878 if (IEM_IS_MODRM_REG_MODE(bRm))
1879 {
1880 /*
1881 * Register, register.
1882 */
1883 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1884 if (pVCpu->iem.s.uVexLength)
1885 {
1886 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1887 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1888 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1889 IEM_MC_PREPARE_AVX_USAGE();
1890 IEM_MC_LOCAL(RTUINT256U, uDst);
1891 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1892 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1893 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1894 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1895 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1896 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1897 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1898 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1899 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1900 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1901 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1902 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1903 IEM_MC_ADVANCE_RIP_AND_FINISH();
1904 IEM_MC_END();
1905 }
1906 else
1907 {
1908 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1909 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1910 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1911 IEM_MC_PREPARE_AVX_USAGE();
1912 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1913 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1914 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1915 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1916 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
1917 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1918 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1919 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1920 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1921 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1922 IEM_MC_ADVANCE_RIP_AND_FINISH();
1923 IEM_MC_END();
1924 }
1925 }
1926 else
1927 {
1928 /*
1929 * Register, memory.
1930 */
1931 if (pVCpu->iem.s.uVexLength)
1932 {
1933 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1936 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1937 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1938 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1939 IEM_MC_PREPARE_AVX_USAGE();
1940
1941 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1942 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1943 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1944
1945 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1946 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1947 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1948 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1949 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1950 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1951 IEM_MC_LOCAL(RTUINT256U, uDst);
1952 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1953 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1954 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1955
1956 IEM_MC_ADVANCE_RIP_AND_FINISH();
1957 IEM_MC_END();
1958 }
1959 else
1960 {
1961 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1964 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1965 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1966 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1967 IEM_MC_PREPARE_AVX_USAGE();
1968
1969 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1970 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1971 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1972
1973 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1974 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1975 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1976 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1977 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1978 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1979 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1980 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1981
1982 IEM_MC_ADVANCE_RIP_AND_FINISH();
1983 IEM_MC_END();
1984 }
1985 }
1986}
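
/*
 * Note: for vblendvps/vblendvpd the fourth (register) operand is a per-element
 * mask: when the sign bit of a mask element is set, the result element comes
 * from the second source, otherwise from the first.  Dword-granular sketch for
 * the 128-bit vblendvps case, illustrative only (RefBlendVPs is a made-up
 * name; assumes puDst does not alias the sources):
 *
 *   static void RefBlendVPs(RTUINT128U *puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, PCRTUINT128U puMask)
 *   {
 *       for (unsigned i = 0; i < 4; i++)
 *           puDst->au32[i] = (puMask->au32[i] & RT_BIT_32(31)) ? puSrc2->au32[i] : puSrc1->au32[i];
 *   }
 */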
1987
1988
1989/** Opcode VEX.66.0F3A 0x4a (vex only).
1990 * AVX, AVX */
1991FNIEMOP_DEF(iemOp_vblendvps_Vx_Hx_Wx_Lx)
1992{
1993 IEMOP_MNEMONIC4(VEX_RVMR, VBLENDVPS, vblendvps, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0);
1994 IEMOPBLENDOP_INIT_VARS(vblendvps);
1995 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
1996}
1997
1998
1999/** Opcode VEX.66.0F3A 0x4b (vex only).
2000 * AVX, AVX */
2001FNIEMOP_DEF(iemOp_vblendvpd_Vx_Hx_Wx_Lx)
2002{
2003 IEMOP_MNEMONIC4(VEX_RVMR, VBLENDVPD, vblendvpd, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0);
2004 IEMOPBLENDOP_INIT_VARS(vblendvpd);
2005 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
2006}
2007
2008
2009/**
2010 * Common worker for AVX2 instructions on the forms:
2011 * - vpxxx xmm0, xmm1, xmm2/mem128, xmm4
2012 * - vpxxx ymm0, ymm1, ymm2/mem256, ymm4
2013 *
2014 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
2015 * Additionally, it triggers \#UD if VEX.W is 1.
2016 */
2017FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
2018{
2019 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2020 if (IEM_IS_MODRM_REG_MODE(bRm))
2021 {
2022 /*
2023 * Register, register.
2024 */
2025 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
2026 if (pVCpu->iem.s.uVexLength)
2027 {
2028 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2029 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx2);
2030 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2031 IEM_MC_PREPARE_AVX_USAGE();
2032
2033 IEM_MC_LOCAL(RTUINT256U, uSrc1);
2034 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
2035 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2036
2037 IEM_MC_LOCAL(RTUINT256U, uSrc2);
2038 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
2039 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2040
2041 IEM_MC_LOCAL(RTUINT256U, uSrc3);
2042 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
2043 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
2044
2045 IEM_MC_LOCAL(RTUINT256U, uDst);
2046 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
2047
2048 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
2049
2050 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2051 IEM_MC_ADVANCE_RIP_AND_FINISH();
2052 IEM_MC_END();
2053 }
2054 else
2055 {
2056 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2057 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
2058 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2059 IEM_MC_PREPARE_AVX_USAGE();
2060 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2061 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2062 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
2063 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2064 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
2065 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2066 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
2067 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
2068 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
2069 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2070 IEM_MC_ADVANCE_RIP_AND_FINISH();
2071 IEM_MC_END();
2072 }
2073 }
2074 else
2075 {
2076 /*
2077 * Register, memory.
2078 */
2079 if (pVCpu->iem.s.uVexLength)
2080 {
2081 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2082 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2083
2084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2085 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
2086
2087 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx2);
2088 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2089 IEM_MC_PREPARE_AVX_USAGE();
2090
2091 IEM_MC_LOCAL(RTUINT256U, uSrc2);
2092 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
2093 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2094
2095 IEM_MC_LOCAL(RTUINT256U, uSrc1);
2096 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
2097 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2098
2099 IEM_MC_LOCAL(RTUINT256U, uSrc3);
2100 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
2101 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
2102
2103 IEM_MC_LOCAL(RTUINT256U, uDst);
2104 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
2105
2106 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
2107
2108 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2109 IEM_MC_ADVANCE_RIP_AND_FINISH();
2110 IEM_MC_END();
2111 }
2112 else
2113 {
2114 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2117 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
2118
2119 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
2120 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2121 IEM_MC_PREPARE_AVX_USAGE();
2122
2123 IEM_MC_LOCAL(RTUINT128U, uSrc2);
2124 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
2125 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2126
2127 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2128 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2129 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
2130 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2131 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
2132 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
2133 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
2134 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2135
2136 IEM_MC_ADVANCE_RIP_AND_FINISH();
2137 IEM_MC_END();
2138 }
2139 }
2140}
2141
2142
2143/** Opcode VEX.66.0F3A 0x4c (vex only).
2144 * AVX, AVX2 */
2145FNIEMOP_DEF(iemOp_vpblendvb_Vx_Hx_Wx_Lx)
2146{
2147 /** @todo testcase: cover VEX.W=1 and check that it triggers \#UD on both real
2148 * and emulated hardware. */
2149 IEMOP_MNEMONIC4(VEX_RVMR, VPBLENDVB, vpblendvb, Vx_WO, Hx, Wx, Lx, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_W_ZERO);
2150 IEMOPBLENDOP_INIT_VARS(vpblendvb);
2151 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2152}
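
/*
 * Note: vpblendvb is the byte-granular variant of the blends above: bit 7 of
 * each mask byte selects between the two sources.  Sketch, illustrative only
 * (RefPBlendVB is a made-up name; assumes puDst does not alias the sources):
 *
 *   static void RefPBlendVB(RTUINT128U *puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, PCRTUINT128U puMask)
 *   {
 *       for (unsigned i = 0; i < 16; i++)
 *           puDst->au8[i] = (puMask->au8[i] & 0x80) ? puSrc2->au8[i] : puSrc1->au8[i];
 *   }
 */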
2153
2154
2155/* Opcode VEX.66.0F3A 0x4d - invalid */
2156/* Opcode VEX.66.0F3A 0x4e - invalid */
2157/* Opcode VEX.66.0F3A 0x4f - invalid */
2158
2159
2160/* Opcode VEX.66.0F3A 0x50 - invalid */
2161/* Opcode VEX.66.0F3A 0x51 - invalid */
2162/* Opcode VEX.66.0F3A 0x52 - invalid */
2163/* Opcode VEX.66.0F3A 0x53 - invalid */
2164/* Opcode VEX.66.0F3A 0x54 - invalid */
2165/* Opcode VEX.66.0F3A 0x55 - invalid */
2166/* Opcode VEX.66.0F3A 0x56 - invalid */
2167/* Opcode VEX.66.0F3A 0x57 - invalid */
2168/* Opcode VEX.66.0F3A 0x58 - invalid */
2169/* Opcode VEX.66.0F3A 0x59 - invalid */
2170/* Opcode VEX.66.0F3A 0x5a - invalid */
2171/* Opcode VEX.66.0F3A 0x5b - invalid */
2172/** Opcode VEX.66.0F3A 0x5c (AMD tables only). */
2173FNIEMOP_STUB(iemOp_vfmaddsubps_Vx_Lx_Wx_Hx);
2174/** Opcode VEX.66.0F3A 0x5d (AMD tables only). */
2175FNIEMOP_STUB(iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx);
2176/** Opcode VEX.66.0F3A 0x5e (AMD tables only). */
2177FNIEMOP_STUB(iemOp_vfmsubaddps_Vx_Lx_Wx_Hx);
2178/** Opcode VEX.66.0F3A 0x5f (AMD tables only). */
2179FNIEMOP_STUB(iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx);
2180
2181
2182/**
2183 * @opcode 0x60
2184 * @oppfx 0x66
2185 * @opflmodify cf,pf,af,zf,sf,of
2186 * @opflclear pf,af
2187 */
2188FNIEMOP_DEF(iemOp_vpcmpestrm_Vdq_Wdq_Ib)
2189{
2190 IEMOP_MNEMONIC3(VEX_RMI, VPCMPESTRM, vpcmpestrm, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2191
2192 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2193 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2194 {
2195 if (IEM_IS_MODRM_REG_MODE(bRm))
2196 {
2197 /*
2198 * Register, register.
2199 */
2200 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2201 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2202 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2203 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2204 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2205 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2206 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2207 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2208 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2209 IEM_MC_PREPARE_SSE_USAGE();
2210 IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2211 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2212 IEM_MC_REF_EFLAGS(pEFlags);
2213 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2214 iemAImpl_vpcmpestrm_u128,
2215 iemAImpl_vpcmpestrm_u128_fallback),
2216 puDst, pEFlags, pSrc, bImmArg);
2217 IEM_MC_ADVANCE_RIP_AND_FINISH();
2218 IEM_MC_END();
2219 }
2220 else
2221 {
2222 /*
2223 * Register, memory.
2224 */
2225 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2226 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2227 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2228 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2229 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2230 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2231
2232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2233 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2234 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2235 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2236 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2237 IEM_MC_PREPARE_SSE_USAGE();
2238
2239 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
2240 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2241 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2242 IEM_MC_REF_EFLAGS(pEFlags);
2243 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2244 iemAImpl_vpcmpestrm_u128,
2245 iemAImpl_vpcmpestrm_u128_fallback),
2246 puDst, pEFlags, pSrc, bImmArg);
2247 IEM_MC_ADVANCE_RIP_AND_FINISH();
2248 IEM_MC_END();
2249 }
2250 }
2251 else
2252 {
2253 if (IEM_IS_MODRM_REG_MODE(bRm))
2254 {
2255 /*
2256 * Register, register.
2257 */
2258 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2259 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2260 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2261 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2262 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2263 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2264 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2265 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2266 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2267 IEM_MC_PREPARE_SSE_USAGE();
2268 IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2269 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2270 IEM_MC_REF_EFLAGS(pEFlags);
2271            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2272 iemAImpl_vpcmpestrm_u128,
2273 iemAImpl_vpcmpestrm_u128_fallback),
2274 puDst, pEFlags, pSrc, bImmArg);
2275 IEM_MC_ADVANCE_RIP_AND_FINISH();
2276 IEM_MC_END();
2277 }
2278 else
2279 {
2280 /*
2281 * Register, memory.
2282 */
2283 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2284 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2285 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2286 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2287 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2289
2290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2291 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2292 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2293 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2294 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2295 IEM_MC_PREPARE_SSE_USAGE();
2296
2297 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
2298 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2299 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2300 IEM_MC_REF_EFLAGS(pEFlags);
2301 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2302 iemAImpl_vpcmpestrm_u128,
2303 iemAImpl_vpcmpestrm_u128_fallback),
2304 puDst, pEFlags, pSrc, bImmArg);
2305 IEM_MC_ADVANCE_RIP_AND_FINISH();
2306 IEM_MC_END();
2307 }
2308 }
2309}
2310
2311
2312/**
2313 * @opcode 0x61
2314 * @oppfx 0x66
2315 * @opflmodify cf,pf,af,zf,sf,of
2316 * @opflclear pf,af
2317 */
2318FNIEMOP_DEF(iemOp_vpcmpestri_Vdq_Wdq_Ib)
2319{
2320 IEMOP_MNEMONIC3(VEX_RMI, VPCMPESTRI, vpcmpestri, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2321
2322 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2323 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2324 {
2325 if (IEM_IS_MODRM_REG_MODE(bRm))
2326 {
2327 /*
2328 * Register, register.
2329 */
2330 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2331 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2332 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2333 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
2334 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2335 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2336 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2337 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2338 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2339 IEM_MC_PREPARE_SSE_USAGE();
2340 IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2341 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
2342 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2343 IEM_MC_REF_EFLAGS(pEFlags);
2344 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2345 iemAImpl_vpcmpestri_u128,
2346 iemAImpl_vpcmpestri_u128_fallback),
2347 pu32Ecx, pEFlags, pSrc, bImmArg);
2348 /** @todo testcase: High dword of RCX cleared? */
2349 IEM_MC_ADVANCE_RIP_AND_FINISH();
2350 IEM_MC_END();
2351 }
2352 else
2353 {
2354 /*
2355 * Register, memory.
2356 */
2357 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2358 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
2359 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2360 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2361 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2363
2364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2365 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2366 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2367 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2368 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2369 IEM_MC_PREPARE_SSE_USAGE();
2370
2371 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
2372 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2373 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
2374 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2375 IEM_MC_REF_EFLAGS(pEFlags);
2376 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2377 iemAImpl_vpcmpestri_u128,
2378 iemAImpl_vpcmpestri_u128_fallback),
2379 pu32Ecx, pEFlags, pSrc, bImmArg);
2380 /** @todo testcase: High dword of RCX cleared? */
2381 IEM_MC_ADVANCE_RIP_AND_FINISH();
2382 IEM_MC_END();
2383 }
2384 }
2385 else
2386 {
2387 if (IEM_IS_MODRM_REG_MODE(bRm))
2388 {
2389 /*
2390 * Register, register.
2391 */
2392 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2393 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2394 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2395 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
2396 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2397 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2398 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2399 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2400 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2401 IEM_MC_PREPARE_SSE_USAGE();
2402 IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2403 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
2404 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2405 IEM_MC_REF_EFLAGS(pEFlags);
2406 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2407 iemAImpl_vpcmpestri_u128,
2408 iemAImpl_vpcmpestri_u128_fallback),
2409 pu32Ecx, pEFlags, pSrc, bImmArg);
2410 /** @todo testcase: High dword of RCX cleared? */
2411 IEM_MC_ADVANCE_RIP_AND_FINISH();
2412 IEM_MC_END();
2413 }
2414 else
2415 {
2416 /*
2417 * Register, memory.
2418 */
2419 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2420 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
2421 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2422 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2423 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2425
2426 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2427 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2428 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2429 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2430 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2431 IEM_MC_PREPARE_SSE_USAGE();
2432
2433 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
2434 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2435 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
2436 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2437 IEM_MC_REF_EFLAGS(pEFlags);
2438 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2439 iemAImpl_vpcmpestri_u128,
2440 iemAImpl_vpcmpestri_u128_fallback),
2441 pu32Ecx, pEFlags, pSrc, bImmArg);
2442 /** @todo testcase: High dword of RCX cleared? */
2443 IEM_MC_ADVANCE_RIP_AND_FINISH();
2444 IEM_MC_END();
2445 }
2446 }
2447}
2448
2449
2450/**
2451 * @opcode 0x62
2452 * @oppfx 0x66
2453 * @opflmodify cf,pf,af,zf,sf,of
2454 * @opflclear pf,af
2455 */
2456FNIEMOP_DEF(iemOp_vpcmpistrm_Vdq_Wdq_Ib)
2457{
2458 IEMOP_MNEMONIC3(VEX_RMI, VPCMPISTRM, vpcmpistrm, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2459
2460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2461 if (IEM_IS_MODRM_REG_MODE(bRm))
2462 {
2463 /*
2464 * Register, register.
2465 */
2466 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2467 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2468 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2469 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2470 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2471 IEM_MC_LOCAL(IEMPCMPISTRXSRC, Src);
2472 IEM_MC_ARG_LOCAL_REF(PIEMPCMPISTRXSRC, pSrc, Src, 2);
2473 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2474 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2475 IEM_MC_PREPARE_SSE_USAGE();
2476 IEM_MC_FETCH_XREG_PAIR_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2477 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2478 IEM_MC_REF_EFLAGS(pEFlags);
2479 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2480 iemAImpl_vpcmpistrm_u128,
2481 iemAImpl_vpcmpistrm_u128_fallback),
2482 puDst, pEFlags, pSrc, bImmArg);
2483 IEM_MC_ADVANCE_RIP_AND_FINISH();
2484 IEM_MC_END();
2485 }
2486 else
2487 {
2488 /*
2489 * Register, memory.
2490 */
2491 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2492 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2493 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2494 IEM_MC_LOCAL(IEMPCMPISTRXSRC, Src);
2495 IEM_MC_ARG_LOCAL_REF(PIEMPCMPISTRXSRC, pSrc, Src, 2);
2496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2497
2498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2499 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2500 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2501 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2502 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2503 IEM_MC_PREPARE_SSE_USAGE();
2504
2505 IEM_MC_FETCH_MEM_U128_AND_XREG_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2506 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2507 IEM_MC_REF_EFLAGS(pEFlags);
2508 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2509 iemAImpl_vpcmpistrm_u128,
2510 iemAImpl_vpcmpistrm_u128_fallback),
2511 puDst, pEFlags, pSrc, bImmArg);
2512 IEM_MC_ADVANCE_RIP_AND_FINISH();
2513 IEM_MC_END();
2514 }
2515}
2516
2517
2518/**
2519 * @opcode 0x63
2520 * @oppfx 0x66
2521 * @opflmodify cf,pf,af,zf,sf,of
2522 * @opflclear pf,af
2523 */
2524FNIEMOP_DEF(iemOp_vpcmpistri_Vdq_Wdq_Ib)
2525{
2526 IEMOP_MNEMONIC3(VEX_RMI, VPCMPISTRI, vpcmpistri, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2527
2528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2529 if (IEM_IS_MODRM_REG_MODE(bRm))
2530 {
2531 /*
2532 * Register, register.
2533 */
2534 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2535 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2536 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2537 IEM_MC_ARG(uint32_t *, pEFlags, 0);
2538 IEM_MC_ARG(PCRTUINT128U, pSrc1, 1);
2539 IEM_MC_ARG(PCRTUINT128U, pSrc2, 2);
2540 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2541 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2542 IEM_MC_PREPARE_SSE_USAGE();
2543 IEM_MC_REF_XREG_U128_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2544 IEM_MC_REF_XREG_U128_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2545 IEM_MC_REF_EFLAGS(pEFlags);
2546 IEM_MC_CALL_AIMPL_4(uint32_t, u32Ecx,
2547 IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2548 iemAImpl_vpcmpistri_u128,
2549 iemAImpl_vpcmpistri_u128_fallback),
2550 pEFlags, pSrc1, pSrc2, bImmArg);
2551 /** @todo testcase: High dword of RCX cleared? */
2552 IEM_MC_STORE_GREG_U32(X86_GREG_xCX, u32Ecx);
2553 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2554
2555 IEM_MC_ADVANCE_RIP_AND_FINISH();
2556 IEM_MC_END();
2557 }
2558 else
2559 {
2560 /*
2561 * Register, memory.
2562 */
2563 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2564 IEM_MC_ARG(uint32_t *, pEFlags, 0);
2565 IEM_MC_ARG(PCRTUINT128U, pSrc1, 1);
2566 IEM_MC_LOCAL(RTUINT128U, Src2);
2567 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc2, Src2, 2);
2568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2569
2570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2571 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2572 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2573 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2574 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2575 IEM_MC_PREPARE_SSE_USAGE();
2576
2577 IEM_MC_FETCH_MEM_U128(Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2578 IEM_MC_REF_XREG_U128_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2579 IEM_MC_REF_EFLAGS(pEFlags);
2580 IEM_MC_CALL_AIMPL_4(uint32_t, u32Ecx,
2581 IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2582 iemAImpl_vpcmpistri_u128,
2583 iemAImpl_vpcmpistri_u128_fallback),
2584 pEFlags, pSrc1, pSrc2, bImmArg);
2585 /** @todo testcase: High dword of RCX cleared? */
2586 IEM_MC_STORE_GREG_U32(X86_GREG_xCX, u32Ecx);
2587 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2588 IEM_MC_ADVANCE_RIP_AND_FINISH();
2589 IEM_MC_END();
2590 }
2591}
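
/*
 * Note on the four pcmpXstrY forms (0x60..0x63): the imm8 is a control byte,
 * not an index.  Bits 0..1 give the element format (00=unsigned bytes,
 * 01=unsigned words, 10=signed bytes, 11=signed words), bits 2..3 the
 * aggregation (00=equal any, 01=ranges, 10=equal each, 11=equal ordered),
 * bits 4..5 the polarity, and bit 6 selects the most instead of the least
 * significant index for the ...stri forms.  The e-forms take explicit lengths
 * in rAX/rDX (64-bit when VEX.W is set, hence the IEM_MC_F_64BIT blocks
 * above); the i-forms stop at the first zero element.  Hedged sketch of just
 * the vpcmpistri bImm=0 case, i.e. unsigned bytes / equal any / positive
 * polarity / least significant index, ignoring EFLAGS (RefPcmpIstrIEqAny is a
 * made-up name covering only that one combination):
 *
 *   static uint32_t RefPcmpIstrIEqAny(uint8_t const au8Src1[16], uint8_t const au8Src2[16])
 *   {
 *       unsigned cchSrc1 = 0, cchSrc2 = 0;
 *       while (cchSrc1 < 16 && au8Src1[cchSrc1]) cchSrc1++;
 *       while (cchSrc2 < 16 && au8Src2[cchSrc2]) cchSrc2++;
 *       for (unsigned i = 0; i < cchSrc2; i++)
 *           for (unsigned j = 0; j < cchSrc1; j++)
 *               if (au8Src2[i] == au8Src1[j])
 *                   return i;  // least significant match -> ECX
 *       return 16;             // no match
 *   }
 */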
2592
2593
2594/* Opcode VEX.66.0F3A 0x64 - invalid */
2595/* Opcode VEX.66.0F3A 0x65 - invalid */
2596/* Opcode VEX.66.0F3A 0x66 - invalid */
2597/* Opcode VEX.66.0F3A 0x67 - invalid */
2598/** Opcode VEX.66.0F3A 0x68 (AMD tables only). */
2599FNIEMOP_STUB(iemOp_vfmaddps_Vx_Lx_Wx_Hx);
2600/** Opcode VEX.66.0F3A 0x69 (AMD tables only). */
2601FNIEMOP_STUB(iemOp_vfmaddpd_Vx_Lx_Wx_Hx);
2602/** Opcode VEX.66.0F3A 0x6a (AMD tables only). */
2603FNIEMOP_STUB(iemOp_vfmaddss_Vx_Lx_Wx_Hx);
2604/** Opcode VEX.66.0F3A 0x6b (AMD tables only). */
2605FNIEMOP_STUB(iemOp_vfmaddsd_Vx_Lx_Wx_Hx);
2606/** Opcode VEX.66.0F3A 0x6c (AMD tables only). */
2607FNIEMOP_STUB(iemOp_vfmsubps_Vx_Lx_Wx_Hx);
2608/** Opcode VEX.66.0F3A 0x6d (AMD tables only). */
2609FNIEMOP_STUB(iemOp_vfmsubpd_Vx_Lx_Wx_Hx);
2610/** Opcode VEX.66.0F3A 0x6e (AMD tables only). */
2611FNIEMOP_STUB(iemOp_vfmsubss_Vx_Lx_Wx_Hx);
2612/** Opcode VEX.66.0F3A 0x6f (AMD tables only). */
2613FNIEMOP_STUB(iemOp_vfmsubsd_Vx_Lx_Wx_Hx);
2614
2615/* Opcode VEX.66.0F3A 0x70 - invalid */
2616/* Opcode VEX.66.0F3A 0x71 - invalid */
2617/* Opcode VEX.66.0F3A 0x72 - invalid */
2618/* Opcode VEX.66.0F3A 0x73 - invalid */
2619/* Opcode VEX.66.0F3A 0x74 - invalid */
2620/* Opcode VEX.66.0F3A 0x75 - invalid */
2621/* Opcode VEX.66.0F3A 0x76 - invalid */
2622/* Opcode VEX.66.0F3A 0x77 - invalid */
2623/** Opcode VEX.66.0F3A 0x78 (AMD tables only). */
2624FNIEMOP_STUB(iemOp_vfnmaddps_Vx_Lx_Wx_Hx);
2625/** Opcode VEX.66.0F3A 0x79 (AMD tables only). */
2626FNIEMOP_STUB(iemOp_vfnmaddpd_Vx_Lx_Wx_Hx);
2627/** Opcode VEX.66.0F3A 0x7a (AMD tables only). */
2628FNIEMOP_STUB(iemOp_vfnmaddss_Vx_Lx_Wx_Hx);
2629/** Opcode VEX.66.0F3A 0x7b (AMD tables only). */
2630FNIEMOP_STUB(iemOp_vfnmaddsd_Vx_Lx_Wx_Hx);
2631/** Opcode VEX.66.0F3A 0x7c (AMD tables only). */
2632FNIEMOP_STUB(iemOp_vfnmsubps_Vx_Lx_Wx_Hx);
2633/** Opcode VEX.66.0F3A 0x7d (AMD tables only). */
2634FNIEMOP_STUB(iemOp_vfnmsubpd_Vx_Lx_Wx_Hx);
2635/** Opcode VEX.66.0F3A 0x7e (AMD tables only). */
2636FNIEMOP_STUB(iemOp_vfnmsubss_Vx_Lx_Wx_Hx);
2637/** Opcode VEX.66.0F3A 0x7f (AMD tables only). */
2638FNIEMOP_STUB(iemOp_vfnmsubsd_Vx_Lx_Wx_Hx);
2639
2640/* Opcodes VEX.66.0F3A 0x80 thru 0xbf are unused. */
2641
2642
2643/* Opcode VEX.66.0F3A 0xc0 - invalid */
2644/* Opcode VEX.66.0F3A 0xc1 - invalid */
2645/* Opcode VEX.66.0F3A 0xc2 - invalid */
2646/* Opcode VEX.66.0F3A 0xc3 - invalid */
2647/* Opcode VEX.66.0F3A 0xc4 - invalid */
2648/* Opcode VEX.66.0F3A 0xc5 - invalid */
2649/* Opcode VEX.66.0F3A 0xc6 - invalid */
2650/* Opcode VEX.66.0F3A 0xc7 - invalid */
2651/* Opcode VEX.66.0F3A 0xc8 - invalid */
2652/* Opcode VEX.66.0F3A 0xc9 - invalid */
2653/* Opcode VEX.66.0F3A 0xca - invalid */
2654/* Opcode VEX.66.0F3A 0xcb - invalid */
2655/* Opcode VEX.66.0F3A 0xcc - invalid */
2656/* Opcode VEX.66.0F3A 0xcd - invalid */
2657/* Opcode VEX.66.0F3A 0xce - invalid */
2658/* Opcode VEX.66.0F3A 0xcf - invalid */
2659
2660
2661/* Opcode VEX.66.0F3A 0xd0 - invalid */
2662/* Opcode VEX.66.0F3A 0xd1 - invalid */
2663/* Opcode VEX.66.0F3A 0xd2 - invalid */
2664/* Opcode VEX.66.0F3A 0xd3 - invalid */
2665/* Opcode VEX.66.0F3A 0xd4 - invalid */
2666/* Opcode VEX.66.0F3A 0xd5 - invalid */
2667/* Opcode VEX.66.0F3A 0xd6 - invalid */
2668/* Opcode VEX.66.0F3A 0xd7 - invalid */
2669/* Opcode VEX.66.0F3A 0xd8 - invalid */
2670/* Opcode VEX.66.0F3A 0xd9 - invalid */
2671/* Opcode VEX.66.0F3A 0xda - invalid */
2672/* Opcode VEX.66.0F3A 0xdb - invalid */
2673/* Opcode VEX.66.0F3A 0xdc - invalid */
2674/* Opcode VEX.66.0F3A 0xdd - invalid */
2675/* Opcode VEX.66.0F3A 0xde - invalid */
2676
2677
2678/** Opcode VEX.66.0F3A 0xdf (aeskeygenassist). */
2679FNIEMOP_DEF(iemOp_vaeskeygen_Vdq_Wdq_Ib)
2680{
2681 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2682 if (IEM_IS_MODRM_REG_MODE(bRm))
2683 {
2684 /*
2685 * Register, register.
2686 */
2687 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2688 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2689 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(fAvx, fAesNi);
2690 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2691 IEM_MC_PREPARE_AVX_USAGE();
2692 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2693 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2694 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
2695 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2696 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2697 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaeskeygenassist_u128, iemAImpl_vaeskeygenassist_u128_fallback),
2698 puDst, puSrc, bImmArg);
2699 IEM_MC_ADVANCE_RIP_AND_FINISH();
2700 IEM_MC_END();
2701 }
2702 else
2703 {
2704 /*
2705 * Register, memory.
2706 */
2707 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2710 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2711 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
2712 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(fAvx, fAesNi);
2713
2714 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2715 IEM_MC_PREPARE_AVX_USAGE();
2716 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2717 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2718 IEM_MC_LOCAL(RTUINT128U, uSrc);
2719 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2720 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2721 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaeskeygenassist_u128, iemAImpl_vaeskeygenassist_u128_fallback),
2722 puDst, puSrc, bImmArg);
2723 IEM_MC_ADVANCE_RIP_AND_FINISH();
2724 IEM_MC_END();
2725 }
2726}
2727
2728
2729/**
2730 * @opcode 0xf0
2731 * @oppfx 0xf2
2732 * @opflclass unchanged
2733 */
2734FNIEMOP_DEF(iemOp_rorx_Gy_Ey_Ib)
2735{
2736 IEMOP_MNEMONIC3(VEX_RMI, RORX, rorx, Gy, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO);
2737 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2738 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2739 if (IEM_IS_MODRM_REG_MODE(bRm))
2740 {
2741 /*
2742 * Register, register.
2743 */
2744 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
2745 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2746 {
2747 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2748 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2749 IEM_MC_ARG(uint64_t *, pDst, 0);
2750 IEM_MC_ARG(uint64_t, uSrc1, 1);
2751 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
2752 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
2753 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2754 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
2755 IEM_MC_ADVANCE_RIP_AND_FINISH();
2756 IEM_MC_END();
2757 }
2758 else
2759 {
2760 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2761 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2762 IEM_MC_ARG(uint32_t *, pDst, 0);
2763 IEM_MC_ARG(uint32_t, uSrc1, 1);
2764 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
2765 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
2766 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2767 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
2768 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2769 IEM_MC_ADVANCE_RIP_AND_FINISH();
2770 IEM_MC_END();
2771 }
2772 }
2773 else
2774 {
2775 /*
2776 * Register, memory.
2777 */
2778 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2779 {
2780 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2781 IEM_MC_ARG(uint64_t *, pDst, 0);
2782 IEM_MC_ARG(uint64_t, uSrc1, 1);
2783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2785 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
2786 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
2787 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2788 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2789 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2790 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
2791 IEM_MC_ADVANCE_RIP_AND_FINISH();
2792 IEM_MC_END();
2793 }
2794 else
2795 {
2796 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2797 IEM_MC_ARG(uint32_t *, pDst, 0);
2798 IEM_MC_ARG(uint32_t, uSrc1, 1);
2799 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2800 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2801 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
2802 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
2803 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2804 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2805 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2806 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
2807 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2808 IEM_MC_ADVANCE_RIP_AND_FINISH();
2809 IEM_MC_END();
2810 }
2811 }
2812}
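
/*
 * Note: rorx is a plain rotate right by an immediate that leaves EFLAGS alone
 * (hence the @opflclass unchanged annotation above); the rotate count is
 * masked to the operand width.  64-bit sketch, illustrative only (RefRorx64
 * is a made-up name):
 *
 *   static uint64_t RefRorx64(uint64_t uSrc, uint8_t cShift)
 *   {
 *       cShift &= 63;
 *       return cShift ? (uSrc >> cShift) | (uSrc << (64 - cShift)) : uSrc;
 *   }
 */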
2813
2814
2815/**
2816 * VEX opcode map \#3.
2817 *
2818 * @sa g_apfnThreeByte0f3a
2819 */
2820const PFNIEMOP g_apfnVexMap3[] =
2821{
2822 /* no prefix, 066h prefix f3h prefix, f2h prefix */
2823 /* 0x00 */ iemOp_InvalidNeedRMImm8, iemOp_vpermq_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2824 /* 0x01 */ iemOp_InvalidNeedRMImm8, iemOp_vpermpd_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2825 /* 0x02 */ iemOp_InvalidNeedRMImm8, iemOp_vpblendd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2826 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2827 /* 0x04 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2828 /* 0x05 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2829 /* 0x06 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2830 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2831 /* 0x08 */ iemOp_InvalidNeedRMImm8, iemOp_vroundps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2832 /* 0x09 */ iemOp_InvalidNeedRMImm8, iemOp_vroundpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2833 /* 0x0a */ iemOp_InvalidNeedRMImm8, iemOp_vroundss_Vss_Wss_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2834 /* 0x0b */ iemOp_InvalidNeedRMImm8, iemOp_vroundsd_Vsd_Wsd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2835 /* 0x0c */ iemOp_InvalidNeedRMImm8, iemOp_vblendps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2836 /* 0x0d */ iemOp_InvalidNeedRMImm8, iemOp_vblendpd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2837 /* 0x0e */ iemOp_InvalidNeedRMImm8, iemOp_vpblendw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2838 /* 0x0f */ iemOp_InvalidNeedRMImm8, iemOp_vpalignr_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2839
2840 /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2841 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2842 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2843 /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2844 /* 0x14 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrb_Eb_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2845 /* 0x15 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrw_Ew_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2846 /* 0x16 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrd_q_Ey_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2847 /* 0x17 */ iemOp_InvalidNeedRMImm8, iemOp_vextractps_Ed_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2848 /* 0x18 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2849 /* 0x19 */ iemOp_InvalidNeedRMImm8, iemOp_vextractf128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2850 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2851 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2852 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2853 /* 0x1d */ iemOp_InvalidNeedRMImm8, iemOp_vcvtps2ph_Wx_Vx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2854 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2855 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2856
2857 /* 0x20 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2858 /* 0x21 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2859 /* 0x22 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2860 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2861 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2862 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2863 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2864 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2865 /* 0x28 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2866 /* 0x29 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2867 /* 0x2a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2868 /* 0x2b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2869 /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2870 /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2871 /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2872 /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2873
2874 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2875 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2876 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2877 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2878 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2879 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2880 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2881 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2882 /* 0x38 */ iemOp_InvalidNeedRMImm8, iemOp_vinserti128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2883 /* 0x39 */ iemOp_InvalidNeedRMImm8, iemOp_vextracti128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2884 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2885 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2886 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2887 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2888 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2889 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2890
2891 /* 0x40 */ iemOp_InvalidNeedRMImm8, iemOp_vdpps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2892 /* 0x41 */ iemOp_InvalidNeedRMImm8, iemOp_vdppd_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2893 /* 0x42 */ iemOp_InvalidNeedRMImm8, iemOp_vmpsadbw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2894 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2895 /* 0x44 */ iemOp_InvalidNeedRMImm8, iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2896 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2897 /* 0x46 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2898 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2899 /* 0x48 */ iemOp_InvalidNeedRMImm8, iemOp_vperlmilzz2ps_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2900 /* 0x49 */ iemOp_InvalidNeedRMImm8, iemOp_vperlmilzz2pd_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2901 /* 0x4a */ iemOp_InvalidNeedRMImm8, iemOp_vblendvps_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2902 /* 0x4b */ iemOp_InvalidNeedRMImm8, iemOp_vblendvpd_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2903 /* 0x4c */ iemOp_InvalidNeedRMImm8, iemOp_vpblendvb_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2904 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2905 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2906 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2907
2908 /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2909 /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2910 /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2911 /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2912 /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2913 /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2914 /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2915 /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2916 /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2917 /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2918 /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2919 /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2920 /* 0x5c */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2921 /* 0x5d */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2922 /* 0x5e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2923 /* 0x5f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2924
2925 /* 0x60 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2926 /* 0x61 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2927 /* 0x62 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2928 /* 0x63 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2929 /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2930 /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2931 /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2932 /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2933 /* 0x68 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2934 /* 0x69 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2935 /* 0x6a */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2936 /* 0x6b */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2937 /* 0x6c */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2938 /* 0x6d */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2939 /* 0x6e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2940 /* 0x6f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2941
    /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x78 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x79 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x7a */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x7b */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x7c */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x7d */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x7e */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x7f */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xdb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xdc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xdd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xde */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xdf */ iemOp_InvalidNeedRMImm8, iemOp_vaeskeygen_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

    /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xf0 */ iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_rorx_Gy_Ey_Ib,
    /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap3) == 1024);
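/* Note: Each opcode row carries four handler pointers, one per SIMD prefix
   column (none, 0x66, 0xF3, 0xF2), with IEMOP_X4 expanding to four copies of
   the same handler; the dispatch code selects the column from the active
   prefix, hence the 256 * 4 = 1024 element count asserted above. */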

/** @} */
