/* $Id: IEMAllInstVexMap3.cpp.h 105306 2024-07-12 13:20:30Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation, 0x0f 0x3a map.
 *
 * @remarks IEMAllInstThree0f3a.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 3
 * @{
 */

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128, imm8
 *     - vpxxx    ymm0, ymm1, ymm2/mem256, imm8
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
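
/*
 * Illustrative sketch, not part of IEM itself: each IEMOPMEDIAOPTF3IMM8 entry
 * the worker above dispatches through pairs a native and a C fallback helper
 * per vector width, taking destination, two sources and the immediate with no
 * implicit CPU state.  A hypothetical 128-bit entry could look like this:
 */
#if 0 /* example only */
static void iemAImpl_vpxxx_u128_sketch(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bImm)
{
    /* E.g. a qword-granular blend steered by the low immediate bits. */
    for (unsigned i = 0; i < 2; i++)
        puDst->au64[i] = (bImm & RT_BIT_32(i)) ? puSrc2->au64[i] : puSrc1->au64[i];
}
#endif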


/**
 * Common worker for AVX instructions on the forms:
 *     - vxxxp{s,d}    xmm0, xmm1/mem128, imm8
 *     - vxxxp{s,d}    ymm0, ymm1/mem256, imm8
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Wx_Ib, PCIEMOPMEDIAF2IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86YMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_ARG(PCX86XMMREG, puSrc, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86YMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86XMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
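
/*
 * Note on the worker above: IEM_MC_CALL_AVX_AIMPL_3 gives the helper implicit
 * access to the guest SIMD state (unlike the IEM_MC_CALL_VOID_AIMPL_* calls
 * used by the integer workers), so MXCSR status flags get updated and the
 * IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT() following each call can
 * turn unmasked SIMD floating-point exceptions into the appropriate fault.
 */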


/**
 * Common worker for AVX instructions on the forms:
 *     - vpermilps/d    xmm0, xmm1/mem128, imm8
 *     - vpermilps/d    ymm0, ymm1/mem256, imm8
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF2IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
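
/*
 * Illustrative sketch, not the actual iemAImpl_vpermilps_u128 code: for
 * vpermilps each 2-bit immediate field selects the source dword for the
 * corresponding destination dword, independently within each 128-bit lane.
 */
#if 0 /* example only */
static void vpermilpsU128Sketch(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
{
    RTUINT128U const uSrc = *puSrc; /* copy first, puDst may alias puSrc */
    for (unsigned i = 0; i < 4; i++)
        puDst->au32[i] = uSrc.au32[(bImm >> (i * 2)) & 3];
}
#endif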


/**
 * Common worker for AVX instructions on the forms:
 *     - vblendps/d    xmm0, xmm1, xmm2/mem128, imm8
 *     - vblendps/d    ymm0, ymm1, ymm2/mem256, imm8
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
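
/*
 * The worker above mirrors iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt; the only
 * difference is that both vector lengths are gated on the AVX cpuid bit,
 * whereas the AVX2 variant requires fAvx2 for the 256-bit paths.
 */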


/** Opcode VEX.66.0F3A 0x00. */
FNIEMOP_DEF(iemOp_vpermq_Vqq_Wqq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermq_u256, iemAImpl_vpermq_u256_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermq_u256, iemAImpl_vpermq_u256_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
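
/*
 * Illustrative sketch, not the actual iemAImpl_vpermq_u256 code: vpermq
 * permutes qwords across the full 256-bit register (it is not lane
 * restricted like vpermilpd), each 2-bit immediate field picking the source
 * qword for the corresponding destination qword.
 */
#if 0 /* example only */
static void vpermqU256Sketch(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t bImm)
{
    RTUINT256U const uSrc = *puSrc; /* copy first, puDst may alias puSrc */
    for (unsigned i = 0; i < 4; i++)
        puDst->au64[i] = uSrc.au64[(bImm >> (i * 2)) & 3];
}
#endif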


/** Opcode VEX.66.0F3A 0x01. */
FNIEMOP_DEF(iemOp_vpermpd_Vqq_Wqq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermpd_u256, iemAImpl_vpermpd_u256_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermpd_u256, iemAImpl_vpermpd_u256_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
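
/*
 * Note: vpermpd moves data exactly like vpermq above; the two encodings only
 * differ in the floating-point vs. integer classification of the operands,
 * so a shared permute core (as sketched for vpermq) would cover both.
 */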


/** Opcode VEX.66.0F3A 0x02.
 * AVX2,AVX2 */
FNIEMOP_DEF(iemOp_vpblendd_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPBLENDD, vpblendd, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.66.0F3A 0x03 - invalid */


/** Opcode VEX.66.0F3A 0x04.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vpermilps_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VPERMILPS, vpermilps, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
    IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x05.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vpermilpd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VPERMILPD, vpermilpd, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
    IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x06 (vex only) */
FNIEMOP_DEF(iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPERM2F128, vperm2f128, Vqq_WO, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
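
/*
 * Illustrative sketch of the imm8 semantics behind the
 * iemAImpl_vperm2f128_u256* helpers (per the SDM, not the actual code):
 * bits 1:0 and 5:4 each pick one of the four source 128-bit lanes, while
 * bits 3 and 7 zero the respective destination lane instead.
 */
#if 0 /* example only */
static void vperm2f128Sketch(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bImm)
{
    RTUINT128U const auLanes[4] = { puSrc1->au128[0], puSrc1->au128[1], puSrc2->au128[0], puSrc2->au128[1] };
    RTUINT128U uZero;
    uZero.s.Lo = 0;
    uZero.s.Hi = 0;
    puDst->au128[0] = (bImm & 0x08) ? uZero : auLanes[ bImm       & 3];
    puDst->au128[1] = (bImm & 0x80) ? uZero : auLanes[(bImm >> 4) & 3];
}
#endif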


/* Opcode VEX.66.0F3A 0x07 - invalid */


/** Opcode VEX.66.0F3A 0x08. */
FNIEMOP_DEF(iemOp_vroundps_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VROUNDPS, vroundps, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
    IEMOPMEDIAF2IMM8_INIT_VARS(vroundps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x09. */
FNIEMOP_DEF(iemOp_vroundpd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VROUNDPD, vroundpd, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
    IEMOPMEDIAF2IMM8_INIT_VARS(vroundpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x0a. */
FNIEMOP_DEF(iemOp_vroundss_Vss_Wss_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VROUNDSS, vroundss, Vps, Hps, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vroundss_u128, iemAImpl_vroundss_u128_fallback),
                                puDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM32, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_EFFECTIVE_VVVV(pVCpu),
                                              0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vroundss_u128, iemAImpl_vroundss_u128_fallback),
                                puDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x0b. */
FNIEMOP_DEF(iemOp_vroundsd_Vsd_Wsd_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VROUNDSD, vroundsd, Vpd, Hpd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vroundsd_u128, iemAImpl_vroundsd_u128_fallback),
                                puDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM64, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_EFFECTIVE_VVVV(pVCpu),
                                              0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vroundsd_u128, iemAImpl_vroundsd_u128_fallback),
                                puDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
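
/*
 * Rounding-immediate note for the vround* opcodes above (per the SDM): imm8
 * bits 1:0 select the rounding mode (nearest/down/up/truncate) when bit 2 is
 * clear, setting bit 2 uses MXCSR.RC instead, and bit 3 suppresses precision
 * (inexact) exception reporting.
 */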


/** Opcode VEX.66.0F3A 0x0c.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vblendps_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VBLENDPS, vblendps, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x0d.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vblendpd_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VBLENDPD, vblendpd, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x0e.
 * AVX,AVX2 */
FNIEMOP_DEF(iemOp_vpblendw_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPBLENDW, vpblendw, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F3A 0x0f - invalid. */


/** Opcode VEX.66.0F3A 0x0f.
 * AVX,AVX2 */
FNIEMOP_DEF(iemOp_vpalignr_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPALIGNR, vpalignr, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpalignr);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.66.0F3A 0x10 - invalid */
/* Opcode VEX.66.0F3A 0x11 - invalid */
/* Opcode VEX.66.0F3A 0x12 - invalid */
/* Opcode VEX.66.0F3A 0x13 - invalid */


/** Opcode VEX.66.0F3A 0x14 - vpextrb Eb, Vdq, Ib */
FNIEMOP_DEF(iemOp_vpextrb_Eb_Vdq_Ib)
{
    IEMOP_MNEMONIC3(VEX_MRI, VPEXTRB, vpextrb, Eb, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(uint8_t, uValue);
        IEM_MC_FETCH_XREG_U8(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem8], XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(uint8_t, uValue);
        IEM_MC_FETCH_XREG_U8(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
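
/*
 * Note: the extract-to-register path above stores the byte via
 * IEM_MC_STORE_GREG_U32, which zero-extends the value over the full
 * general-purpose register as the SDM specifies for VPEXTRB (and likewise
 * for VPEXTRW below).
 */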


/** Opcode VEX.66.0F3A 0x15 - vpextrw Ew, Vdq, Ib */
FNIEMOP_DEF(iemOp_vpextrw_Ew_Vdq_Ib)
{
    /** @todo testcase: check that this ignores VEX.W. */
    IEMOP_MNEMONIC3(VEX_MRI, VPEXTRW, vpextrw, Ew_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem16], XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x16 - vpextrd / vpextrq Eq / Ey, Vdq, Ib */
FNIEMOP_DEF(iemOp_vpextrd_q_Ey_Vdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        IEMOP_MNEMONIC3(VEX_MRI, VPEXTRQ, vpextrq, Eq_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_W_ONE);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * greg64, XMM, imm8.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint64_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * [mem64], XMM, imm8.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint64_t, uValue);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         */
        IEMOP_MNEMONIC3(VEX_MRI, VPEXTRD, vpextrd, Ey_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_W_ZERO);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * greg32, XMM, imm8.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U32(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * [mem32], XMM, imm8.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uValue);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U32(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode VEX.66.0F3A 0x17. */
FNIEMOP_DEF(iemOp_vextractps_Ed_Vdq_Ib)
{
    //IEMOP_MNEMONIC3(VEX_MRI_REG, VEXTRACTPS, vextractps, Ed, Vdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem32], XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x18 (vex only). */
FNIEMOP_DEF(iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib)
{
    //IEMOP_MNEMONIC4(VEX_RMI, VINSERTF128, vinsertf128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_STORE_YREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_STORE_YREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
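
/*
 * Note: the vinsertf128 paths above first copy the full 256-bit source from
 * VVVV into the destination (zero-extending above VLMAX) and then overwrite
 * the 128-bit lane selected by imm8 bit 0, matching the SDM merge semantics.
 */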


/** Opcode VEX.66.0F3A 0x19 (vex only). */
FNIEMOP_DEF(iemOp_vextractf128_Wdq_Vqq_Ib)
{
    IEMOP_MNEMONIC3(VEX_MRI, VEXTRACTF128, vextractf128, Wdq, Vqq, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.66.0F3A 0x1a - invalid */
/* Opcode VEX.66.0F3A 0x1b - invalid */
/* Opcode VEX.66.0F3A 0x1c - invalid */
/** Opcode VEX.66.0F3A 0x1d (vex only). */
FNIEMOP_STUB(iemOp_vcvtps2ph_Wx_Vx_Ib);
/* Opcode VEX.66.0F3A 0x1e - invalid */
/* Opcode VEX.66.0F3A 0x1f - invalid */


/** Opcode VEX.66.0F3A 0x20. */
FNIEMOP_DEF(iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib)
{
    /*IEMOP_MNEMONIC4(VEX_RMVI, VPINSRB, vpinsrb, Vdq, Hdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint8_t, uValue);

        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_GREG_U8(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint8_t, uValue);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_MEM_U8(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x21, */
FNIEMOP_DEF(iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib)
{
    //IEMOP_MNEMONIC4(VEX_RVMR_REG, VINSERTPS, vinsertps, Vdq, Hdq, UdqMd, Ib, DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_ZERO); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint32_t, uSrc2);

        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_XREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), (bImm >> 6) & 3);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), (bImm >> 4) & 3, uSrc2);
        IEM_MC_CLEAR_XREG_U32_MASK(IEM_GET_MODRM_REG(pVCpu, bRm), bImm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, XMM, [mem32], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint32_t, uSrc2);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), (bImm >> 4) & 3, uSrc2);
        IEM_MC_CLEAR_XREG_U32_MASK(IEM_GET_MODRM_REG(pVCpu, bRm), bImm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
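
/*
 * Illustrative sketch of the vinsertps imm8 decode performed inline above
 * (register-source form, per the SDM): bits 7:6 pick the source dword,
 * bits 5:4 the destination dword, and the low nibble zeroes dwords.
 */
#if 0 /* example only */
static void vinsertpsSketch(PRTUINT128U puDst, PCRTUINT128U puSrc1 /*VVVV*/, PCRTUINT128U puSrc2 /*U(rm)*/, uint8_t bImm)
{
    RTUINT128U uTmp = *puSrc1;
    uTmp.au32[(bImm >> 4) & 3] = puSrc2->au32[(bImm >> 6) & 3];
    for (unsigned i = 0; i < 4; i++)
        if (bImm & RT_BIT_32(i)) /* zero mask */
            uTmp.au32[i] = 0;
    *puDst = uTmp;
}
#endif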


/** Opcode VEX.66.0F3A 0x22. */
FNIEMOP_DEF(iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /*IEMOP_MNEMONIC4(VEX_RMVI, VPINSRQ, vpinsrq, Vdq, Hdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register, register.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint64_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U64(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * Register, memory.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint64_t, uValue);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_MEM_U64(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*IEMOP_MNEMONIC4(VEX_RMVI, VPINSRD, vpinsrd, Vdq, Hdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register, register.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint32_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U32(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * Register, memory.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint32_t, uValue);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_MEM_U32(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/* Opcode VEX.66.0F3A 0x23 - invalid */
/* Opcode VEX.66.0F3A 0x24 - invalid */
/* Opcode VEX.66.0F3A 0x25 - invalid */
/* Opcode VEX.66.0F3A 0x26 - invalid */
/* Opcode VEX.66.0F3A 0x27 - invalid */
/* Opcode VEX.66.0F3A 0x28 - invalid */
/* Opcode VEX.66.0F3A 0x29 - invalid */
/* Opcode VEX.66.0F3A 0x2a - invalid */
/* Opcode VEX.66.0F3A 0x2b - invalid */
/* Opcode VEX.66.0F3A 0x2c - invalid */
/* Opcode VEX.66.0F3A 0x2d - invalid */
/* Opcode VEX.66.0F3A 0x2e - invalid */
/* Opcode VEX.66.0F3A 0x2f - invalid */


/* Opcode VEX.66.0F3A 0x30 - invalid */
/* Opcode VEX.66.0F3A 0x31 - invalid */
/* Opcode VEX.66.0F3A 0x32 - invalid */
/* Opcode VEX.66.0F3A 0x33 - invalid */
/* Opcode VEX.66.0F3A 0x34 - invalid */
/* Opcode VEX.66.0F3A 0x35 - invalid */
/* Opcode VEX.66.0F3A 0x36 - invalid */
/* Opcode VEX.66.0F3A 0x37 - invalid */


/** Opcode VEX.66.0F3A 0x38 (vex only). */
FNIEMOP_DEF(iemOp_vinserti128_Vqq_Hqq_Wqq_Ib)
{
    //IEMOP_MNEMONIC4(VEX_RMI, VINSERTI128, vinserti128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_STORE_YREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_STORE_YREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x39 (vex only). */
FNIEMOP_DEF(iemOp_vextracti128_Wdq_Vqq_Ib)
{
    IEMOP_MNEMONIC3(VEX_MRI, VEXTRACTI128, vextracti128, Wdq, Vqq, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.66.0F3A 0x3a - invalid */
/* Opcode VEX.66.0F3A 0x3b - invalid */
/* Opcode VEX.66.0F3A 0x3c - invalid */
/* Opcode VEX.66.0F3A 0x3d - invalid */
/* Opcode VEX.66.0F3A 0x3e - invalid */
/* Opcode VEX.66.0F3A 0x3f - invalid */


/** Opcode VEX.66.0F3A 0x40. */
FNIEMOP_STUB(iemOp_vdpps_Vx_Hx_Wx_Ib);
/** Opcode VEX.66.0F3A 0x41, */
FNIEMOP_STUB(iemOp_vdppd_Vdq_Hdq_Wdq_Ib);


/** Opcode VEX.66.0F3A 0x42. */
FNIEMOP_DEF(iemOp_vmpsadbw_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VMPSADBW, vmpsadbw, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vmpsadbw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1567}
1568
1569
1570/* Opcode VEX.66.0F3A 0x43 - invalid */
1571
1572
1573/** Opcode VEX.66.0F3A 0x44. */
1574FNIEMOP_DEF(iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib)
1575{
    IEMOP_MNEMONIC4(VEX_RVMI, VPCLMULQDQ, vpclmulqdq, Vdq_WO, Hdq, Wdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
        IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

1613 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1614 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1615 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1616 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
1617 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1618 IEM_MC_PREPARE_AVX_USAGE();
1619
1620 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1621 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1622 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1623 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
1624 puDst, puSrc1, puSrc2, bImmArg);
1625 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1626
1627 IEM_MC_ADVANCE_RIP_AND_FINISH();
1628 IEM_MC_END();
1629 }
1630}
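
/*
 * Illustrative reference sketch, not used by the emulation above: immediate
 * bit 0 picks the qword from the first source, bit 4 the qword from the
 * second, and the result is their 128-bit carry-less (GF(2)) product, i.e. a
 * shift-and-XOR multiplication with no carries between bit positions.
 */
static void iemPclmulQdqRefSketch(uint64_t au64Dst[2], uint64_t const au64Src1[2],
                                  uint64_t const au64Src2[2], uint8_t bImm)
{
    uint64_t const uA = au64Src1[bImm & 1];
    uint64_t const uB = au64Src2[(bImm >> 4) & 1];
    uint64_t       uLo = 0;
    uint64_t       uHi = 0;
    for (unsigned i = 0; i < 64; i++)
        if (uB & RT_BIT_64(i))
        {
            uLo ^= uA << i;
            uHi ^= i ? uA >> (64 - i) : 0;
        }
    au64Dst[0] = uLo;
    au64Dst[1] = uHi;
}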
1631
1632
1633/* Opcode VEX.66.0F3A 0x45 - invalid */
1634
1635
1636/** Opcode VEX.66.0F3A 0x46 (vex only) */
1637FNIEMOP_DEF(iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib)
1638{
1639 IEMOP_MNEMONIC4(VEX_RVMI, VPERM2I128, vperm2i128, Vqq_WO, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ONE);
1640 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1641 if (IEM_IS_MODRM_REG_MODE(bRm))
1642 {
1643 /*
1644 * Register, register.
1645 */
1646 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1647 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1648 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
1649 IEM_MC_LOCAL(RTUINT256U, uDst);
1650 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1651 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1652 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1653 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1654 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1655 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
1656 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1657 IEM_MC_PREPARE_AVX_USAGE();
1658 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1659 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1660 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
1661 puDst, puSrc1, puSrc2, bImmArg);
1662 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1663 IEM_MC_ADVANCE_RIP_AND_FINISH();
1664 IEM_MC_END();
1665 }
1666 else
1667 {
1668 /*
1669 * Register, memory.
1670 */
1671 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1672 IEM_MC_LOCAL(RTUINT256U, uDst);
1673 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1674 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1676 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1677 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1678 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1679
1680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1681 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1682 IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
1683 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
1684 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1685 IEM_MC_PREPARE_AVX_USAGE();
1686
1687 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1688 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1689 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
1690 puDst, puSrc1, puSrc2, bImmArg);
1691 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1692
1693 IEM_MC_ADVANCE_RIP_AND_FINISH();
1694 IEM_MC_END();
1695 }
1696}
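
/*
 * Illustrative reference sketch, not used by the emulation above: each
 * 128-bit half of the destination is produced from a 4-bit selector in the
 * immediate (low nibble for the low half, high nibble for the high half);
 * selector bits [1:0] pick one of the four source halves and bit 3 zeroes
 * the result half instead.
 */
static void iemVPerm2i128RefSketch(uint64_t au64Dst[4], uint64_t const au64Src1[4],
                                   uint64_t const au64Src2[4], uint8_t bImm)
{
    for (unsigned iHalf = 0; iHalf < 2; iHalf++)
    {
        uint8_t const          bSel  = (uint8_t)(bImm >> (iHalf * 4));
        uint64_t const * const pau64 = (bSel & 2) ? au64Src2 : au64Src1;
        unsigned const         iSrc  = (bSel & 1) * 2;
        au64Dst[iHalf * 2 + 0] = (bSel & 8) ? 0 : pau64[iSrc + 0];
        au64Dst[iHalf * 2 + 1] = (bSel & 8) ? 0 : pau64[iSrc + 1];
    }
}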
1697
1698
1699/* Opcode VEX.66.0F3A 0x47 - invalid */
1700/** Opcode VEX.66.0F3A 0x48 (AMD tables only). */
1701 FNIEMOP_STUB(iemOp_vpermil2ps_Vx_Hx_Wx_Lx);
1702/** Opcode VEX.66.0F3A 0x49 (AMD tables only). */
1703 FNIEMOP_STUB(iemOp_vpermil2pd_Vx_Hx_Wx_Lx);
1704
1705
1706/**
1707 * Common worker for AVX instructions on the forms:
1708 * - vblendvps/d xmm0, xmm1, xmm2/mem128, xmm4
1709 * - vblendvps/d ymm0, ymm1, ymm2/mem256, ymm4
1710 *
1711 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operations.
1712 * Additionally, it triggers \#UD if VEX.W is 1.
1713 */
1714FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
1715{
1716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1717 if (IEM_IS_MODRM_REG_MODE(bRm))
1718 {
1719 /*
1720 * Register, register.
1721 */
1722 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1723 if (pVCpu->iem.s.uVexLength)
1724 {
1725 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1726 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1727 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1728 IEM_MC_PREPARE_AVX_USAGE();
1729 IEM_MC_LOCAL(RTUINT256U, uDst);
1730 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1731 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1732 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1733 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1734 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1735 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1736 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1737 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1738 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1739 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1740 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1741 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1742 IEM_MC_ADVANCE_RIP_AND_FINISH();
1743 IEM_MC_END();
1744 }
1745 else
1746 {
1747 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1748 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1749 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1750 IEM_MC_PREPARE_AVX_USAGE();
1751 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1752 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1753 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1754 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1755 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
1756 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1757 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1758 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1759 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1760 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1761 IEM_MC_ADVANCE_RIP_AND_FINISH();
1762 IEM_MC_END();
1763 }
1764 }
1765 else
1766 {
1767 /*
1768 * Register, memory.
1769 */
1770 if (pVCpu->iem.s.uVexLength)
1771 {
1772 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1773 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1775 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1776 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1777 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1778 IEM_MC_PREPARE_AVX_USAGE();
1779
1780 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1781 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1782 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1783
1784 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1785 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1786 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1787 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1788 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1789 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1790 IEM_MC_LOCAL(RTUINT256U, uDst);
1791 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1792 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1793 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1794
1795 IEM_MC_ADVANCE_RIP_AND_FINISH();
1796 IEM_MC_END();
1797 }
1798 else
1799 {
1800 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1803 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1804 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1805 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1806 IEM_MC_PREPARE_AVX_USAGE();
1807
1808 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1809 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1810 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1811
1812 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1813 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1814 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1815 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1816 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1817 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1818 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1819 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1820
1821 IEM_MC_ADVANCE_RIP_AND_FINISH();
1822 IEM_MC_END();
1823 }
1824 }
1825}
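
/*
 * Illustrative reference sketch, not used by the emulation above: for
 * vblendvps each 32-bit destination element comes from the second source when
 * the sign bit of the corresponding mask element is set, and from the first
 * source otherwise; the mask register (the Lx operand) is encoded in the top
 * four bits of the trailing byte fetched as bOp4 above.
 */
static void iemBlendVpsRefSketch(uint32_t au32Dst[], uint32_t const au32Src1[],
                                 uint32_t const au32Src2[], uint32_t const au32Msk[],
                                 unsigned cElems /* 4 for xmm, 8 for ymm */)
{
    for (unsigned i = 0; i < cElems; i++)
        au32Dst[i] = (au32Msk[i] & UINT32_C(0x80000000)) ? au32Src2[i] : au32Src1[i];
}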
1826
1827
1828/** Opcode VEX.66.0F3A 0x4a (vex only).
1829 * CPUID: AVX for both the 128-bit and 256-bit forms. */
1830FNIEMOP_DEF(iemOp_vblendvps_Vx_Hx_Wx_Lx)
1831{
1832 IEMOP_MNEMONIC4(VEX_RVMR, VBLENDVPS, vblendvps, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0);
1833 IEMOPBLENDOP_INIT_VARS(vblendvps);
1834 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
1835}
1836
1837
1838/** Opcode VEX.66.0F3A 0x4b (vex only).
1839 * CPUID: AVX for both the 128-bit and 256-bit forms. */
1840FNIEMOP_DEF(iemOp_vblendvpd_Vx_Hx_Wx_Lx)
1841{
1842 IEMOP_MNEMONIC4(VEX_RVMR, VBLENDVPD, vblendvpd, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0);
1843 IEMOPBLENDOP_INIT_VARS(vblendvpd);
1844 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
1845}
1846
1847
1848/**
1849 * Common worker for AVX2 instructions on the forms:
1850 * - vpxxx xmm0, xmm1, xmm2/mem128, xmm4
1851 * - vpxxx ymm0, ymm1, ymm2/mem256, ymm4
1852 *
1853 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
1854 * Additionally, it triggers \#UD if VEX.W is 1.
1855 */
1856FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
1857{
1858 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1859 if (IEM_IS_MODRM_REG_MODE(bRm))
1860 {
1861 /*
1862 * Register, register.
1863 */
1864 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1865 if (pVCpu->iem.s.uVexLength)
1866 {
1867 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1868 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx2);
1869 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1870 IEM_MC_PREPARE_AVX_USAGE();
1871
1872 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1873 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1874 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1875
1876 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1877 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1878 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1879
1880 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1881 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1882 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1883
1884 IEM_MC_LOCAL(RTUINT256U, uDst);
1885 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1886
1887 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1888
1889 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1890 IEM_MC_ADVANCE_RIP_AND_FINISH();
1891 IEM_MC_END();
1892 }
1893 else
1894 {
1895 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1896 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1897 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1898 IEM_MC_PREPARE_AVX_USAGE();
1899 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1900 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1901 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1902 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1903 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
1904 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1905 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1906 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1907 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1908 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1909 IEM_MC_ADVANCE_RIP_AND_FINISH();
1910 IEM_MC_END();
1911 }
1912 }
1913 else
1914 {
1915 /*
1916 * Register, memory.
1917 */
1918 if (pVCpu->iem.s.uVexLength)
1919 {
1920 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1922
1923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1924 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1925
1926 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx2);
1927 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1928 IEM_MC_PREPARE_AVX_USAGE();
1929
1930 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1931 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1932 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1933
1934 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1935 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1936 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1937
1938 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1939 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1940 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1941
1942 IEM_MC_LOCAL(RTUINT256U, uDst);
1943 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1944
1945 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1946
1947 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1948 IEM_MC_ADVANCE_RIP_AND_FINISH();
1949 IEM_MC_END();
1950 }
1951 else
1952 {
1953 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1954 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1956 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1957
1958 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1959 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1960 IEM_MC_PREPARE_AVX_USAGE();
1961
1962 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1963 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1964 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1965
1966 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1967 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1968 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1969 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1970 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1971 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1972 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1973 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1974
1975 IEM_MC_ADVANCE_RIP_AND_FINISH();
1976 IEM_MC_END();
1977 }
1978 }
1979}
1980
1981
1982/** Opcode VEX.66.0F3A 0x4c (vex only).
1983 * CPUID: AVX for the 128-bit form, AVX2 for the 256-bit form. */
1984FNIEMOP_DEF(iemOp_vpblendvb_Vx_Hx_Wx_Lx)
1985{
1986 /** @todo testcase: cover VEX.W=1 and check that it triggers \#UD on both real
1987 * and emulated hardware. */
1988 IEMOP_MNEMONIC4(VEX_RVMR, VPBLENDVB, vpblendvb, Vx_WO, Hx, Wx, Lx, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_W_ZERO);
1989 IEMOPBLENDOP_INIT_VARS(vpblendvb);
1990 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1991}
1992
1993
1994/* Opcode VEX.66.0F3A 0x4d - invalid */
1995/* Opcode VEX.66.0F3A 0x4e - invalid */
1996/* Opcode VEX.66.0F3A 0x4f - invalid */
1997
1998
1999/* Opcode VEX.66.0F3A 0x50 - invalid */
2000/* Opcode VEX.66.0F3A 0x51 - invalid */
2001/* Opcode VEX.66.0F3A 0x52 - invalid */
2002/* Opcode VEX.66.0F3A 0x53 - invalid */
2003/* Opcode VEX.66.0F3A 0x54 - invalid */
2004/* Opcode VEX.66.0F3A 0x55 - invalid */
2005/* Opcode VEX.66.0F3A 0x56 - invalid */
2006/* Opcode VEX.66.0F3A 0x57 - invalid */
2007/* Opcode VEX.66.0F3A 0x58 - invalid */
2008/* Opcode VEX.66.0F3A 0x59 - invalid */
2009/* Opcode VEX.66.0F3A 0x5a - invalid */
2010/* Opcode VEX.66.0F3A 0x5b - invalid */
2011/** Opcode VEX.66.0F3A 0x5c (AMD tables only). */
2012FNIEMOP_STUB(iemOp_vfmaddsubps_Vx_Lx_Wx_Hx);
2013/** Opcode VEX.66.0F3A 0x5d (AMD tables only). */
2014FNIEMOP_STUB(iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx);
2015/** Opcode VEX.66.0F3A 0x5e (AMD tables only). */
2016FNIEMOP_STUB(iemOp_vfmsubaddps_Vx_Lx_Wx_Hx);
2017/** Opcode VEX.66.0F3A 0x5f (AMD tables only). */
2018FNIEMOP_STUB(iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx);
2019
2020
2021/**
2022 * @opcode 0x60
2023 * @oppfx 0x66
2024 * @opflmodify cf,pf,af,zf,sf,of
2025 * @opflclear pf,af
2026 */
2027FNIEMOP_DEF(iemOp_vpcmpestrm_Vdq_Wdq_Ib)
2028{
2029 IEMOP_MNEMONIC3(VEX_RMI, VPCMPESTRM, vpcmpestrm, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2030
2031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2032 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2033 {
2034 if (IEM_IS_MODRM_REG_MODE(bRm))
2035 {
2036 /*
2037 * Register, register.
2038 */
2039 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2040 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2041 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2042 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2043 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2044 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2045 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2046 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2047 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2048 IEM_MC_PREPARE_SSE_USAGE();
2049 IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2050 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2051 IEM_MC_REF_EFLAGS(pEFlags);
2052 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2053 iemAImpl_vpcmpestrm_u128,
2054 iemAImpl_vpcmpestrm_u128_fallback),
2055 puDst, pEFlags, pSrc, bImmArg);
2056 IEM_MC_ADVANCE_RIP_AND_FINISH();
2057 IEM_MC_END();
2058 }
2059 else
2060 {
2061 /*
2062 * Register, memory.
2063 */
2064 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2065 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2066 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2067 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2068 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2069 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2070
2071 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2072 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2073 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2074 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2075 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2076 IEM_MC_PREPARE_SSE_USAGE();
2077
2078 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
2079 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2080 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2081 IEM_MC_REF_EFLAGS(pEFlags);
2082 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2083 iemAImpl_vpcmpestrm_u128,
2084 iemAImpl_vpcmpestrm_u128_fallback),
2085 puDst, pEFlags, pSrc, bImmArg);
2086 IEM_MC_ADVANCE_RIP_AND_FINISH();
2087 IEM_MC_END();
2088 }
2089 }
2090 else
2091 {
2092 if (IEM_IS_MODRM_REG_MODE(bRm))
2093 {
2094 /*
2095 * Register, register.
2096 */
2097 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2098 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2099 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2100 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2101 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2102 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2103 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2104 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2105 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2106 IEM_MC_PREPARE_SSE_USAGE();
2107 IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2108 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2109 IEM_MC_REF_EFLAGS(pEFlags);
2110            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2111 iemAImpl_vpcmpestrm_u128,
2112 iemAImpl_vpcmpestrm_u128_fallback),
2113 puDst, pEFlags, pSrc, bImmArg);
2114 IEM_MC_ADVANCE_RIP_AND_FINISH();
2115 IEM_MC_END();
2116 }
2117 else
2118 {
2119 /*
2120 * Register, memory.
2121 */
2122 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2123 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2124 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2125 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2126 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2127 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2128
2129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2130 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2131 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2132 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2133 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2134 IEM_MC_PREPARE_SSE_USAGE();
2135
2136 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
2137 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2138 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2139 IEM_MC_REF_EFLAGS(pEFlags);
2140 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2141 iemAImpl_vpcmpestrm_u128,
2142 iemAImpl_vpcmpestrm_u128_fallback),
2143 puDst, pEFlags, pSrc, bImmArg);
2144 IEM_MC_ADVANCE_RIP_AND_FINISH();
2145 IEM_MC_END();
2146 }
2147 }
2148}
2149
2150
2151/**
2152 * @opcode 0x61
2153 * @oppfx 0x66
2154 * @opflmodify cf,pf,af,zf,sf,of
2155 * @opflclear pf,af
2156 */
2157FNIEMOP_DEF(iemOp_vpcmpestri_Vdq_Wdq_Ib)
2158{
2159 IEMOP_MNEMONIC3(VEX_RMI, VPCMPESTRI, vpcmpestri, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2160
2161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2162 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2163 {
2164 if (IEM_IS_MODRM_REG_MODE(bRm))
2165 {
2166 /*
2167 * Register, register.
2168 */
2169 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2170 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2171 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2172 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
2173 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2174 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2175 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2176 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2177 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2178 IEM_MC_PREPARE_SSE_USAGE();
2179 IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2180 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
2181 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2182 IEM_MC_REF_EFLAGS(pEFlags);
2183 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2184 iemAImpl_vpcmpestri_u128,
2185 iemAImpl_vpcmpestri_u128_fallback),
2186 pu32Ecx, pEFlags, pSrc, bImmArg);
2187 /** @todo testcase: High dword of RCX cleared? */
2188 IEM_MC_ADVANCE_RIP_AND_FINISH();
2189 IEM_MC_END();
2190 }
2191 else
2192 {
2193 /*
2194 * Register, memory.
2195 */
2196 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2197 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
2198 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2199 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2200 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2202
2203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2204 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2205 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2206 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2207 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2208 IEM_MC_PREPARE_SSE_USAGE();
2209
2210 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
2211 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2212 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
2213 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2214 IEM_MC_REF_EFLAGS(pEFlags);
2215 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2216 iemAImpl_vpcmpestri_u128,
2217 iemAImpl_vpcmpestri_u128_fallback),
2218 pu32Ecx, pEFlags, pSrc, bImmArg);
2219 /** @todo testcase: High dword of RCX cleared? */
2220 IEM_MC_ADVANCE_RIP_AND_FINISH();
2221 IEM_MC_END();
2222 }
2223 }
2224 else
2225 {
2226 if (IEM_IS_MODRM_REG_MODE(bRm))
2227 {
2228 /*
2229 * Register, register.
2230 */
2231 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2232 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2233 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2234 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
2235 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2236 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2237 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2238 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2239 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2240 IEM_MC_PREPARE_SSE_USAGE();
2241 IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2242 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
2243 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2244 IEM_MC_REF_EFLAGS(pEFlags);
2245 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2246 iemAImpl_vpcmpestri_u128,
2247 iemAImpl_vpcmpestri_u128_fallback),
2248 pu32Ecx, pEFlags, pSrc, bImmArg);
2249 /** @todo testcase: High dword of RCX cleared? */
2250 IEM_MC_ADVANCE_RIP_AND_FINISH();
2251 IEM_MC_END();
2252 }
2253 else
2254 {
2255 /*
2256 * Register, memory.
2257 */
2258 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2259 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
2260 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2261 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2262 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2264
2265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2266 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2267 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2268 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2269 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2270 IEM_MC_PREPARE_SSE_USAGE();
2271
2272 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
2273 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2274 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
2275 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2276 IEM_MC_REF_EFLAGS(pEFlags);
2277 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2278 iemAImpl_vpcmpestri_u128,
2279 iemAImpl_vpcmpestri_u128_fallback),
2280 pu32Ecx, pEFlags, pSrc, bImmArg);
2281 /** @todo testcase: High dword of RCX cleared? */
2282 IEM_MC_ADVANCE_RIP_AND_FINISH();
2283 IEM_MC_END();
2284 }
2285 }
2286}
2287
2288
2289/**
2290 * @opcode 0x62
2291 * @oppfx 0x66
2292 * @opflmodify cf,pf,af,zf,sf,of
2293 * @opflclear pf,af
2294 */
2295FNIEMOP_DEF(iemOp_vpcmpistrm_Vdq_Wdq_Ib)
2296{
2297 IEMOP_MNEMONIC3(VEX_RMI, VPCMPISTRM, vpcmpistrm, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2298
2299 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2300 if (IEM_IS_MODRM_REG_MODE(bRm))
2301 {
2302 /*
2303 * Register, register.
2304 */
2305 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2306 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2307 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2308 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2309 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2310 IEM_MC_LOCAL(IEMPCMPISTRXSRC, Src);
2311 IEM_MC_ARG_LOCAL_REF(PIEMPCMPISTRXSRC, pSrc, Src, 2);
2312 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2313 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2314 IEM_MC_PREPARE_SSE_USAGE();
2315 IEM_MC_FETCH_XREG_PAIR_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2316 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2317 IEM_MC_REF_EFLAGS(pEFlags);
2318 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2319 iemAImpl_vpcmpistrm_u128,
2320 iemAImpl_vpcmpistrm_u128_fallback),
2321 puDst, pEFlags, pSrc, bImmArg);
2322 IEM_MC_ADVANCE_RIP_AND_FINISH();
2323 IEM_MC_END();
2324 }
2325 else
2326 {
2327 /*
2328 * Register, memory.
2329 */
2330 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2331 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2332 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2333 IEM_MC_LOCAL(IEMPCMPISTRXSRC, Src);
2334 IEM_MC_ARG_LOCAL_REF(PIEMPCMPISTRXSRC, pSrc, Src, 2);
2335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2336
2337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2338 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2339 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2340 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2341 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2342 IEM_MC_PREPARE_SSE_USAGE();
2343
2344 IEM_MC_FETCH_MEM_U128_AND_XREG_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2345 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2346 IEM_MC_REF_EFLAGS(pEFlags);
2347 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2348 iemAImpl_vpcmpistrm_u128,
2349 iemAImpl_vpcmpistrm_u128_fallback),
2350 puDst, pEFlags, pSrc, bImmArg);
2351 IEM_MC_ADVANCE_RIP_AND_FINISH();
2352 IEM_MC_END();
2353 }
2354}
2355
2356
2357/**
2358 * @opcode 0x63
2359 * @oppfx 0x66
2360 * @opflmodify cf,pf,af,zf,sf,of
2361 * @opflclear pf,af
2362 */
2363FNIEMOP_DEF(iemOp_vpcmpistri_Vdq_Wdq_Ib)
2364{
2365 IEMOP_MNEMONIC3(VEX_RMI, VPCMPISTRI, vpcmpistri, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2366
2367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2368 if (IEM_IS_MODRM_REG_MODE(bRm))
2369 {
2370 /*
2371 * Register, register.
2372 */
2373 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2374 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2375 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2376 IEM_MC_ARG(uint32_t *, pEFlags, 0);
2377 IEM_MC_ARG(PCRTUINT128U, pSrc1, 1);
2378 IEM_MC_ARG(PCRTUINT128U, pSrc2, 2);
2379 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2380 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2381 IEM_MC_PREPARE_SSE_USAGE();
2382 IEM_MC_REF_XREG_U128_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2383 IEM_MC_REF_XREG_U128_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2384 IEM_MC_REF_EFLAGS(pEFlags);
2385 IEM_MC_CALL_AIMPL_4(uint32_t, u32Ecx,
2386 IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2387 iemAImpl_vpcmpistri_u128,
2388 iemAImpl_vpcmpistri_u128_fallback),
2389 pEFlags, pSrc1, pSrc2, bImmArg);
2390 /** @todo testcase: High dword of RCX cleared? */
2391 IEM_MC_STORE_GREG_U32(X86_GREG_xCX, u32Ecx);
2392 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2393
2394 IEM_MC_ADVANCE_RIP_AND_FINISH();
2395 IEM_MC_END();
2396 }
2397 else
2398 {
2399 /*
2400 * Register, memory.
2401 */
2402 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2403 IEM_MC_ARG(uint32_t *, pEFlags, 0);
2404 IEM_MC_ARG(PCRTUINT128U, pSrc1, 1);
2405 IEM_MC_LOCAL(RTUINT128U, Src2);
2406 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc2, Src2, 2);
2407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2408
2409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2410 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2411 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2412 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2413 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2414 IEM_MC_PREPARE_SSE_USAGE();
2415
2416 IEM_MC_FETCH_MEM_U128(Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2417 IEM_MC_REF_XREG_U128_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2418 IEM_MC_REF_EFLAGS(pEFlags);
2419 IEM_MC_CALL_AIMPL_4(uint32_t, u32Ecx,
2420 IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2421 iemAImpl_vpcmpistri_u128,
2422 iemAImpl_vpcmpistri_u128_fallback),
2423 pEFlags, pSrc1, pSrc2, bImmArg);
2424 /** @todo testcase: High dword of RCX cleared? */
2425 IEM_MC_STORE_GREG_U32(X86_GREG_xCX, u32Ecx);
2426 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2427 IEM_MC_ADVANCE_RIP_AND_FINISH();
2428 IEM_MC_END();
2429 }
2430}
2431
2432
2433/* Opcode VEX.66.0F3A 0x64 - invalid */
2434/* Opcode VEX.66.0F3A 0x65 - invalid */
2435/* Opcode VEX.66.0F3A 0x66 - invalid */
2436/* Opcode VEX.66.0F3A 0x67 - invalid */
2437/** Opcode VEX.66.0F3A 0x68 (AMD tables only). */
2438FNIEMOP_STUB(iemOp_vfmaddps_Vx_Lx_Wx_Hx);
2439/** Opcode VEX.66.0F3A 0x69 (AMD tables only). */
2440FNIEMOP_STUB(iemOp_vfmaddpd_Vx_Lx_Wx_Hx);
2441/** Opcode VEX.66.0F3A 0x6a (AMD tables only). */
2442FNIEMOP_STUB(iemOp_vfmaddss_Vx_Lx_Wx_Hx);
2443/** Opcode VEX.66.0F3A 0x6b (AMD tables only). */
2444FNIEMOP_STUB(iemOp_vfmaddsd_Vx_Lx_Wx_Hx);
2445/** Opcode VEX.66.0F3A 0x6c (AMD tables only). */
2446FNIEMOP_STUB(iemOp_vfmsubps_Vx_Lx_Wx_Hx);
2447/** Opcode VEX.66.0F3A 0x6d (AMD tables only). */
2448FNIEMOP_STUB(iemOp_vfmsubpd_Vx_Lx_Wx_Hx);
2449/** Opcode VEX.66.0F3A 0x6e (AMD tables only). */
2450FNIEMOP_STUB(iemOp_vfmsubss_Vx_Lx_Wx_Hx);
2451/** Opcode VEX.66.0F3A 0x6f (AMD tables only). */
2452FNIEMOP_STUB(iemOp_vfmsubsd_Vx_Lx_Wx_Hx);
2453
2454/* Opcode VEX.66.0F3A 0x70 - invalid */
2455/* Opcode VEX.66.0F3A 0x71 - invalid */
2456/* Opcode VEX.66.0F3A 0x72 - invalid */
2457/* Opcode VEX.66.0F3A 0x73 - invalid */
2458/* Opcode VEX.66.0F3A 0x74 - invalid */
2459/* Opcode VEX.66.0F3A 0x75 - invalid */
2460/* Opcode VEX.66.0F3A 0x76 - invalid */
2461/* Opcode VEX.66.0F3A 0x77 - invalid */
2462/** Opcode VEX.66.0F3A 0x78 (AMD tables only). */
2463FNIEMOP_STUB(iemOp_vfnmaddps_Vx_Lx_Wx_Hx);
2464/** Opcode VEX.66.0F3A 0x79 (AMD tables only). */
2465FNIEMOP_STUB(iemOp_vfnmaddpd_Vx_Lx_Wx_Hx);
2466/** Opcode VEX.66.0F3A 0x7a (AMD tables only). */
2467FNIEMOP_STUB(iemOp_vfnmaddss_Vx_Lx_Wx_Hx);
2468/** Opcode VEX.66.0F3A 0x7b (AMD tables only). */
2469FNIEMOP_STUB(iemOp_vfnmaddsd_Vx_Lx_Wx_Hx);
2470/** Opcode VEX.66.0F3A 0x7c (AMD tables only). */
2471FNIEMOP_STUB(iemOp_vfnmsubps_Vx_Lx_Wx_Hx);
2472/** Opcode VEX.66.0F3A 0x7d (AMD tables only). */
2473FNIEMOP_STUB(iemOp_vfnmsubpd_Vx_Lx_Wx_Hx);
2474/** Opcode VEX.66.0F3A 0x7e (AMD tables only). */
2475FNIEMOP_STUB(iemOp_vfnmsubss_Vx_Lx_Wx_Hx);
2476/** Opcode VEX.66.0F3A 0x7f (AMD tables only). */
2477FNIEMOP_STUB(iemOp_vfnmsubsd_Vx_Lx_Wx_Hx);
2478
2479/* Opcodes VEX.66.0F3A 0x80 thru 0xbf are invalid. */
2480
2481
2482/* Opcode VEX.66.0F3A 0xc0 - invalid */
2483/* Opcode VEX.66.0F3A 0xc1 - invalid */
2484/* Opcode VEX.66.0F3A 0xc2 - invalid */
2485/* Opcode VEX.66.0F3A 0xc3 - invalid */
2486/* Opcode VEX.66.0F3A 0xc4 - invalid */
2487/* Opcode VEX.66.0F3A 0xc5 - invalid */
2488/* Opcode VEX.66.0F3A 0xc6 - invalid */
2489/* Opcode VEX.66.0F3A 0xc7 - invalid */
2490/* Opcode VEX.66.0F3A 0xc8 - invalid */
2491/* Opcode VEX.66.0F3A 0xc9 - invalid */
2492/* Opcode VEX.66.0F3A 0xca - invalid */
2493/* Opcode VEX.66.0F3A 0xcb - invalid */
2494/* Opcode VEX.66.0F3A 0xcc - invalid */
2495/* Opcode VEX.66.0F3A 0xcd - invalid */
2496/* Opcode VEX.66.0F3A 0xce - invalid */
2497/* Opcode VEX.66.0F3A 0xcf - invalid */
2498
2499
2500/* Opcode VEX.66.0F3A 0xd0 - invalid */
2501/* Opcode VEX.66.0F3A 0xd1 - invalid */
2502/* Opcode VEX.66.0F3A 0xd2 - invalid */
2503/* Opcode VEX.66.0F3A 0xd3 - invalid */
2504/* Opcode VEX.66.0F3A 0xd4 - invalid */
2505/* Opcode VEX.66.0F3A 0xd5 - invalid */
2506/* Opcode VEX.66.0F3A 0xd6 - invalid */
2507/* Opcode VEX.66.0F3A 0xd7 - invalid */
2508/* Opcode VEX.66.0F3A 0xd8 - invalid */
2509/* Opcode VEX.66.0F3A 0xd9 - invalid */
2510/* Opcode VEX.66.0F3A 0xda - invalid */
2511/* Opcode VEX.66.0F3A 0xdb - invalid */
2512/* Opcode VEX.66.0F3A 0xdc - invalid */
2513/* Opcode VEX.66.0F3A 0xdd - invalid */
2514/* Opcode VEX.66.0F3A 0xde - invalid */
2515
2516
2517/** Opcode VEX.66.0F3A 0xdf (aeskeygenassist). */
2518FNIEMOP_DEF(iemOp_vaeskeygen_Vdq_Wdq_Ib)
2519{
2520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2521 if (IEM_IS_MODRM_REG_MODE(bRm))
2522 {
2523 /*
2524 * Register, register.
2525 */
2526 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2527 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2528 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(fAvx, fAesNi);
2529 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2530 IEM_MC_PREPARE_AVX_USAGE();
2531 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2532 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2533 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
2534 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2535 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2536 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaeskeygenassist_u128, iemAImpl_vaeskeygenassist_u128_fallback),
2537 puDst, puSrc, bImmArg);
2538 IEM_MC_ADVANCE_RIP_AND_FINISH();
2539 IEM_MC_END();
2540 }
2541 else
2542 {
2543 /*
2544 * Register, memory.
2545 */
2546 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2549 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2550 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
2551 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(fAvx, fAesNi);
2552
2553 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2554 IEM_MC_PREPARE_AVX_USAGE();
2555 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2556 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2557 IEM_MC_LOCAL(RTUINT128U, uSrc);
2558 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2559 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2560 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaeskeygenassist_u128, iemAImpl_vaeskeygenassist_u128_fallback),
2561 puDst, puSrc, bImmArg);
2562 IEM_MC_ADVANCE_RIP_AND_FINISH();
2563 IEM_MC_END();
2564 }
2565}
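
/*
 * Illustrative reference sketch, not used by the emulation above: the
 * instruction applies the AES forward S-box (SubWord) to dwords 1 and 3 of
 * the source and derives the other two result dwords by an 8-bit right
 * rotation (RotWord) XORed with the round constant from the immediate.  The
 * S-box table is only assumed here; it is not defined in this file.
 */
extern uint8_t const g_abAesSBoxSketch[256]; /* assumed: the standard AES forward S-box */

static uint32_t iemAesSubWordSketch(uint32_t u32)
{
    return   (uint32_t)g_abAesSBoxSketch[ u32        & 0xff]
           | ((uint32_t)g_abAesSBoxSketch[(u32 >>  8) & 0xff] <<  8)
           | ((uint32_t)g_abAesSBoxSketch[(u32 >> 16) & 0xff] << 16)
           | ((uint32_t)g_abAesSBoxSketch[(u32 >> 24) & 0xff] << 24);
}

static void iemAesKeyGenAssistRefSketch(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bImm)
{
    uint32_t const uRcon = bImm;                        /* RCON = imm8, zero extended. */
    uint32_t const uX1   = iemAesSubWordSketch(au32Src[1]);
    uint32_t const uX3   = iemAesSubWordSketch(au32Src[3]);
    au32Dst[0] = uX1;
    au32Dst[1] = ((uX1 >> 8) | (uX1 << 24)) ^ uRcon;    /* RotWord, then XOR the round constant. */
    au32Dst[2] = uX3;
    au32Dst[3] = ((uX3 >> 8) | (uX3 << 24)) ^ uRcon;
}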
2566
2567
2568/**
2569 * @opcode 0xf0
2570 * @oppfx 0xf2
2571 * @opflclass unchanged
2572 */
2573FNIEMOP_DEF(iemOp_rorx_Gy_Ey_Ib)
2574{
2575 IEMOP_MNEMONIC3(VEX_RMI, RORX, rorx, Gy, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO);
2576 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2577 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2578 if (IEM_IS_MODRM_REG_MODE(bRm))
2579 {
2580 /*
2581 * Register, register.
2582 */
2583 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
2584 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2585 {
2586 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2587 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2588 IEM_MC_ARG(uint64_t *, pDst, 0);
2589 IEM_MC_ARG(uint64_t, uSrc1, 1);
2590 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
2591 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
2592 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2593 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
2594 IEM_MC_ADVANCE_RIP_AND_FINISH();
2595 IEM_MC_END();
2596 }
2597 else
2598 {
2599 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2600 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2601 IEM_MC_ARG(uint32_t *, pDst, 0);
2602 IEM_MC_ARG(uint32_t, uSrc1, 1);
2603 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
2604 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
2605 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2606 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
2607 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2608 IEM_MC_ADVANCE_RIP_AND_FINISH();
2609 IEM_MC_END();
2610 }
2611 }
2612 else
2613 {
2614 /*
2615 * Register, memory.
2616 */
2617 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2618 {
2619 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2620 IEM_MC_ARG(uint64_t *, pDst, 0);
2621 IEM_MC_ARG(uint64_t, uSrc1, 1);
2622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2624 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
2625 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
2626 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2627 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2628 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2629 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
2630 IEM_MC_ADVANCE_RIP_AND_FINISH();
2631 IEM_MC_END();
2632 }
2633 else
2634 {
2635 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2636 IEM_MC_ARG(uint32_t *, pDst, 0);
2637 IEM_MC_ARG(uint32_t, uSrc1, 1);
2638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2640 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
2641 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
2642 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2643 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2644 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2645 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
2646 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2647 IEM_MC_ADVANCE_RIP_AND_FINISH();
2648 IEM_MC_END();
2649 }
2650 }
2651}
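
/*
 * Illustrative reference sketch, not used by the emulation above: rorx is a
 * plain rotate-right by an immediate count written to a separate destination,
 * with RFLAGS left untouched; the count is masked to the operand width
 * (63 for the 64-bit form, 31 for the 32-bit form).
 */
static uint64_t iemRorx64RefSketch(uint64_t uSrc, uint8_t bImm)
{
    unsigned const cShift = bImm & 63;
    return cShift ? (uSrc >> cShift) | (uSrc << (64 - cShift)) : uSrc;
}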
2652
2653
2654/**
2655 * VEX opcode map \#3.
2656 *
2657 * @sa g_apfnThreeByte0f3a
2658 */
2659const PFNIEMOP g_apfnVexMap3[] =
2660{
2661 /* no prefix, 066h prefix f3h prefix, f2h prefix */
2662 /* 0x00 */ iemOp_InvalidNeedRMImm8, iemOp_vpermq_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2663 /* 0x01 */ iemOp_InvalidNeedRMImm8, iemOp_vpermpd_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2664 /* 0x02 */ iemOp_InvalidNeedRMImm8, iemOp_vpblendd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2665 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2666 /* 0x04 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2667 /* 0x05 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2668 /* 0x06 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2669 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2670 /* 0x08 */ iemOp_InvalidNeedRMImm8, iemOp_vroundps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2671 /* 0x09 */ iemOp_InvalidNeedRMImm8, iemOp_vroundpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2672 /* 0x0a */ iemOp_InvalidNeedRMImm8, iemOp_vroundss_Vss_Wss_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2673 /* 0x0b */ iemOp_InvalidNeedRMImm8, iemOp_vroundsd_Vsd_Wsd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2674 /* 0x0c */ iemOp_InvalidNeedRMImm8, iemOp_vblendps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2675 /* 0x0d */ iemOp_InvalidNeedRMImm8, iemOp_vblendpd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2676 /* 0x0e */ iemOp_InvalidNeedRMImm8, iemOp_vpblendw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2677 /* 0x0f */ iemOp_InvalidNeedRMImm8, iemOp_vpalignr_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2678
2679 /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2680 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2681 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2682 /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2683 /* 0x14 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrb_Eb_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2684 /* 0x15 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrw_Ew_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2685 /* 0x16 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrd_q_Ey_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2686 /* 0x17 */ iemOp_InvalidNeedRMImm8, iemOp_vextractps_Ed_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2687 /* 0x18 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2688 /* 0x19 */ iemOp_InvalidNeedRMImm8, iemOp_vextractf128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2689 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2690 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2691 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2692 /* 0x1d */ iemOp_InvalidNeedRMImm8, iemOp_vcvtps2ph_Wx_Vx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2693 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2694 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2695
2696 /* 0x20 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2697 /* 0x21 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2698 /* 0x22 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2699 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2700 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2701 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2702 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2703 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2704 /* 0x28 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2705 /* 0x29 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2706 /* 0x2a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2707 /* 0x2b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2708 /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2709 /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2710 /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2711 /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2712
2713 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2714 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2715 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2716 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2717 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2718 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2719 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2720 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2721 /* 0x38 */ iemOp_InvalidNeedRMImm8, iemOp_vinserti128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2722 /* 0x39 */ iemOp_InvalidNeedRMImm8, iemOp_vextracti128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2723 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2724 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2725 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2726 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2727 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2728 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2729
2730 /* 0x40 */ iemOp_InvalidNeedRMImm8, iemOp_vdpps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2731 /* 0x41 */ iemOp_InvalidNeedRMImm8, iemOp_vdppd_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2732 /* 0x42 */ iemOp_InvalidNeedRMImm8, iemOp_vmpsadbw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2733 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2734 /* 0x44 */ iemOp_InvalidNeedRMImm8, iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2735 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2736 /* 0x46 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2737 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2738 /* 0x48 */ iemOp_InvalidNeedRMImm8, iemOp_vpermil2ps_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2739 /* 0x49 */ iemOp_InvalidNeedRMImm8, iemOp_vpermil2pd_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2740 /* 0x4a */ iemOp_InvalidNeedRMImm8, iemOp_vblendvps_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2741 /* 0x4b */ iemOp_InvalidNeedRMImm8, iemOp_vblendvpd_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2742 /* 0x4c */ iemOp_InvalidNeedRMImm8, iemOp_vpblendvb_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2743 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2744 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2745 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2746
2747 /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2748 /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2749 /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2750 /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2751 /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2752 /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2753 /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2754 /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2755 /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2756 /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2757 /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2758 /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2759 /* 0x5c */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2760 /* 0x5d */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2761 /* 0x5e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2762 /* 0x5f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2763
2764 /* 0x60 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2765 /* 0x61 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2766 /* 0x62 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2767 /* 0x63 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2768 /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2769 /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2770 /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2771 /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2772 /* 0x68 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2773 /* 0x69 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2774 /* 0x6a */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2775 /* 0x6b */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2776 /* 0x6c */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2777 /* 0x6d */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2778 /* 0x6e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2779 /* 0x6f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2780
2781 /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2782 /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2783 /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2784 /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2785 /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2786 /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2787 /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2788 /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2789 /* 0x78 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2790 /* 0x79 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2791 /* 0x7a */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2792 /* 0x7b */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2793 /* 0x7c */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2794 /* 0x7d */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2795 /* 0x7e */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2796 /* 0x7f */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2797
2798 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2799 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2800 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2801 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2802 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2803 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2804 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2805 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2806 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2807 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2808 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2809 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2810 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2811 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2812 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2813 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2814
2815 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2816 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2817 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2818 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2819 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2820 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2821 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2822 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2823 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2824 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2825 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2826 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2827 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2828 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2829 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2830 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2831
2832 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2833 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2834 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2835 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2836 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2837 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2838 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2839 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2840 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2841 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2842 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2843 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2844 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2845 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2846 /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2847 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2848
2849 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2850 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2851 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2852 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2853 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2854 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2855 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2856 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2857 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2858 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2859 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2860 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2861 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2862 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2863 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2864 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2865
2866 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2867 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2868 /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2869 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2870 /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2871 /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2872 /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2873 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2874 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2875 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2876 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2877 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2878 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2879 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2880 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2881 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2882
2883 /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2884 /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2885 /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2886 /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2887 /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2888 /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2889 /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2890 /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2891 /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2892 /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2893 /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2894 /* 0xdb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2895 /* 0xdc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2896 /* 0xdd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2897 /* 0xde */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2898 /* 0xdf */ iemOp_vaeskeygen_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2899
2900 /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2901 /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2902 /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2903 /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2904 /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2905 /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2906 /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2907 /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2908 /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2909 /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2910 /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2911 /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2912 /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2913 /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2914 /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2915 /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2916
2917 /* 0xf0 */ iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_rorx_Gy_Ey_Ib,
2918 /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2919 /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2920 /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2921 /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2922 /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2923 /* 0xf6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2924 /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2925 /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2926 /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2927 /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2928 /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2929 /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2930 /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2931 /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2932 /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2933};
2934AssertCompile(RT_ELEMENTS(g_apfnVexMap3) == 1024);
2935
2936/** @} */
2937