/* $Id: IEMAllInstVexMap3.cpp.h 105279 2024-07-11 17:19:12Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation, 0x0f 0x3a map.
 *
 * @remarks IEMAllInstThree0f3a.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 3
 * @{
 */

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128, imm8
 *     - vpxxx    ymm0, ymm1, ymm2/mem256, imm8
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * Common worker for AVX instructions on the forms:
 *     - vxxxp{s,d}    xmm0, xmm1/mem128, imm8
 *     - vxxxp{s,d}    ymm0, ymm1/mem256, imm8
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Wx_Ib, PCIEMOPMEDIAF2IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86YMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_ARG(PCX86XMMREG, puSrc, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86YMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86XMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * Common worker for AVX instructions on the forms:
 *     - vpermilps/d    xmm0, xmm1/mem128, imm8
 *     - vpermilps/d    ymm0, ymm1/mem256, imm8
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF2IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * Common worker for AVX instructions on the forms:
 *     - vblendps/d    xmm0, xmm1, xmm2/mem128, imm8
 *     - vblendps/d    ymm0, ymm1, ymm2/mem256, imm8
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode VEX.66.0F3A 0x00. */
FNIEMOP_STUB(iemOp_vpermq_Vqq_Wqq_Ib);
/** Opcode VEX.66.0F3A 0x01. */
FNIEMOP_STUB(iemOp_vpermpd_Vqq_Wqq_Ib);


/** Opcode VEX.66.0F3A 0x02.
 * AVX2,AVX2 */
FNIEMOP_DEF(iemOp_vpblendd_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPBLENDD, vpblendd, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
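    /* Note: the imm8 is a per-dword blend mask here; bit n set takes dword n
       from the W (third) operand, clear takes it from the H (second) one. */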
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.66.0F3A 0x03 - invalid */


/** Opcode VEX.66.0F3A 0x04.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vpermilps_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VPERMILPS, vpermilps, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
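    /* Note: the imm8 holds four 2-bit selectors, one per destination dword;
       each picks a source dword from within the same 128-bit lane. */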
    IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x05.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vpermilpd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VPERMILPD, vpermilpd, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
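    /* Note: here the imm8 uses one bit per destination qword (bits 0-1 for the
       low lane, bits 2-3 for the high lane) to pick a qword within the lane. */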
    IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x06 (vex only) */
FNIEMOP_DEF(iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPERM2F128, vperm2f128, Vqq_WO, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0);
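    /* Note: imm8 layout per the SDM: bits 1:0 select the 128-bit source half
       for the destination low half (0/1 = SRC1 lo/hi, 2/3 = SRC2 lo/hi), bits
       5:4 do the same for the high half, and bits 3 and 7 zero the respective
       half instead.  E.g. imm8=0x21 puts SRC1.hi in the low half and SRC2.lo
       in the high half. */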
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.66.0F3A 0x07 - invalid */


/** Opcode VEX.66.0F3A 0x08. */
FNIEMOP_DEF(iemOp_vroundps_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VROUNDPS, vroundps, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
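    /* Note: imm8 is the SSE4.1 rounding control: bits 1:0 give the rounding
       mode, bit 2 selects MXCSR.RC instead, and bit 3 suppresses precision
       exceptions (per the SDM ROUNDPS description). */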
    IEMOPMEDIAF2IMM8_INIT_VARS( vroundps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x09. */
FNIEMOP_DEF(iemOp_vroundpd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VROUNDPD, vroundpd, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
    IEMOPMEDIAF2IMM8_INIT_VARS( vroundpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x0a. */
FNIEMOP_STUB(iemOp_vroundss_Vss_Wss_Ib);
/** Opcode VEX.66.0F3A 0x0b. */
FNIEMOP_STUB(iemOp_vroundsd_Vsd_Wsd_Ib);


/** Opcode VEX.66.0F3A 0x0c.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vblendps_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VBLENDPS, vblendps, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
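    /* Note: imm8 bit n set takes dword n of the result from the W operand,
       clear takes it from the H operand (bits 0-3 for XMM, 0-7 for YMM). */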
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x0d.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vblendpd_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VBLENDPD, vblendpd, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x0e.
 * AVX,AVX2 */
FNIEMOP_DEF(iemOp_vpblendw_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPBLENDW, vpblendw, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F3A 0x0f - invalid. */


/** Opcode VEX.66.0F3A 0x0f.
 * AVX,AVX2 */
FNIEMOP_DEF(iemOp_vpalignr_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPALIGNR, vpalignr, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
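    /* Note: the imm8 is a byte shift count; the instruction concatenates the
       H and W operands and shifts right by imm8 bytes, per 128-bit lane. */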
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpalignr);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.66.0F3A 0x10 - invalid */
/* Opcode VEX.66.0F3A 0x11 - invalid */
/* Opcode VEX.66.0F3A 0x12 - invalid */
/* Opcode VEX.66.0F3A 0x13 - invalid */


/** Opcode VEX.66.0F3A 0x14 - vpextrb Eb, Vdq, Ib */
FNIEMOP_DEF(iemOp_vpextrb_Eb_Vdq_Ib)
{
    IEMOP_MNEMONIC3(VEX_MRI, VPEXTRB, vpextrb, Eb, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(uint8_t, uValue);
        IEM_MC_FETCH_XREG_U8(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem8], XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(uint8_t, uValue);
        IEM_MC_FETCH_XREG_U8(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x15 - vpextrw Ew, Vdq, Ib */
FNIEMOP_DEF(iemOp_vpextrw_Ew_Vdq_Ib)
{
    /** @todo testcase: check that this ignores VEX.W. */
    IEMOP_MNEMONIC3(VEX_MRI, VPEXTRW, vpextrw, Ew_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem16], XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x16 - vpextrd / vpextrq Eq / Ey, Vdq, Ib */
FNIEMOP_DEF(iemOp_vpextrd_q_Ey_Vdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        IEMOP_MNEMONIC3(VEX_MRI, VPEXTRQ, vpextrq, Eq_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_W_ONE);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * greg64, XMM, imm8.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint64_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * [mem64], XMM, imm8.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint64_t, uValue);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         */
        IEMOP_MNEMONIC3(VEX_MRI, VPEXTRD, vpextrd, Ey_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_W_ZERO);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * greg32, XMM, imm8.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U32(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * [mem32], XMM, imm8.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uValue);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U32(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode VEX.66.0F3A 0x17. */
FNIEMOP_DEF(iemOp_vextractps_Ed_Vdq_Ib)
{
    //IEMOP_MNEMONIC3(VEX_MRI_REG, VEXTRACTPS, vextractps, Ed, Vdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
        IEM_MC_STORE_GREG_U32( IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem32], XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x18 (vex only). */
FNIEMOP_DEF(iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib)
{
    //IEMOP_MNEMONIC4(VEX_RMI, VINSERTF128, vinsertf128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x19 (vex only). */
FNIEMOP_DEF(iemOp_vextractf128_Wdq_Vqq_Ib)
{
    IEMOP_MNEMONIC3(VEX_MRI, VEXTRACTF128, vextractf128, Wdq, Vqq, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_RM(pVCpu, bRm), uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.66.0F3A 0x1a - invalid */
/* Opcode VEX.66.0F3A 0x1b - invalid */
/* Opcode VEX.66.0F3A 0x1c - invalid */
/** Opcode VEX.66.0F3A 0x1d (vex only). */
FNIEMOP_STUB(iemOp_vcvtps2ph_Wx_Vx_Ib);
/* Opcode VEX.66.0F3A 0x1e - invalid */
/* Opcode VEX.66.0F3A 0x1f - invalid */


/** Opcode VEX.66.0F3A 0x20. */
FNIEMOP_DEF(iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib)
{
    /*IEMOP_MNEMONIC4(VEX_RMVI, VPINSRB, vpinsrb, Vdq, Hdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint8_t, uValue);

        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_GREG_U8(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U8( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint8_t, uValue);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_MEM_U8(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U8( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x21, */
FNIEMOP_DEF(iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib)
{
    //IEMOP_MNEMONIC4(VEX_RVMR_REG, VINSERTPS, vinsertps, Vdq, Hdq, UdqMd, Ib, DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_ZERO); /// @todo
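    /* Note: imm8 fields as extracted below: bits 7:6 select the source dword
       (register form only), bits 5:4 the destination dword to replace, and
       bits 3:0 are a zero mask applied to the result. */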
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint32_t, uSrc2);

        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_XREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), (bImm >> 6) & 3);
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U32( IEM_GET_MODRM_REG(pVCpu, bRm), (bImm >> 4) & 3, uSrc2);
        IEM_MC_CLEAR_XREG_U32_MASK( IEM_GET_MODRM_REG(pVCpu, bRm), bImm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, XMM, [mem32], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint32_t, uSrc2);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U32( IEM_GET_MODRM_REG(pVCpu, bRm), (bImm >> 4) & 3, uSrc2);
        IEM_MC_CLEAR_XREG_U32_MASK( IEM_GET_MODRM_REG(pVCpu, bRm), bImm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x22. */
FNIEMOP_DEF(iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /*IEMOP_MNEMONIC4(VEX_RMVI, VPINSRQ, vpinsrq, Vdq, Hdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register, register.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint64_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U64(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U64( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * Register, memory.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint64_t, uValue);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_MEM_U64(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U64( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*IEMOP_MNEMONIC4(VEX_RMVI, VPINSRD, vpinsrd, Vdq, Hdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register, register.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint32_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U32(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U32( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * Register, memory.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint32_t, uValue);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_MEM_U32(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U32( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/* Opcode VEX.66.0F3A 0x23 - invalid */
/* Opcode VEX.66.0F3A 0x24 - invalid */
/* Opcode VEX.66.0F3A 0x25 - invalid */
/* Opcode VEX.66.0F3A 0x26 - invalid */
/* Opcode VEX.66.0F3A 0x27 - invalid */
/* Opcode VEX.66.0F3A 0x28 - invalid */
/* Opcode VEX.66.0F3A 0x29 - invalid */
/* Opcode VEX.66.0F3A 0x2a - invalid */
/* Opcode VEX.66.0F3A 0x2b - invalid */
/* Opcode VEX.66.0F3A 0x2c - invalid */
/* Opcode VEX.66.0F3A 0x2d - invalid */
/* Opcode VEX.66.0F3A 0x2e - invalid */
/* Opcode VEX.66.0F3A 0x2f - invalid */


/* Opcode VEX.66.0F3A 0x30 - invalid */
/* Opcode VEX.66.0F3A 0x31 - invalid */
/* Opcode VEX.66.0F3A 0x32 - invalid */
/* Opcode VEX.66.0F3A 0x33 - invalid */
/* Opcode VEX.66.0F3A 0x34 - invalid */
/* Opcode VEX.66.0F3A 0x35 - invalid */
/* Opcode VEX.66.0F3A 0x36 - invalid */
/* Opcode VEX.66.0F3A 0x37 - invalid */


/** Opcode VEX.66.0F3A 0x38 (vex only). */
FNIEMOP_DEF(iemOp_vinserti128_Vqq_Hqq_Wqq_Ib)
{
    //IEMOP_MNEMONIC4(VEX_RMI, VINSERTI128, vinserti128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x39 (vex only). */
FNIEMOP_DEF(iemOp_vextracti128_Wdq_Vqq_Ib)
{
    IEMOP_MNEMONIC3(VEX_MRI, VEXTRACTI128, vextracti128, Wdq, Vqq, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_RM(pVCpu, bRm), uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.66.0F3A 0x3a - invalid */
/* Opcode VEX.66.0F3A 0x3b - invalid */
/* Opcode VEX.66.0F3A 0x3c - invalid */
/* Opcode VEX.66.0F3A 0x3d - invalid */
/* Opcode VEX.66.0F3A 0x3e - invalid */
/* Opcode VEX.66.0F3A 0x3f - invalid */


/** Opcode VEX.66.0F3A 0x40. */
FNIEMOP_STUB(iemOp_vdpps_Vx_Hx_Wx_Ib);
/** Opcode VEX.66.0F3A 0x41, */
FNIEMOP_STUB(iemOp_vdppd_Vdq_Hdq_Wdq_Ib);


/** Opcode VEX.66.0F3A 0x42. */
FNIEMOP_DEF(iemOp_vmpsadbw_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VMPSADBW, vmpsadbw, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
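    /* Note: imm8 selects the 4-byte block offsets compared: bits 1:0 pick the
       dword offset in the W operand and bit 2 in the H operand; for the YMM
       form bits 5:3 control the high lane the same way (per the SDM). */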
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vmpsadbw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/* Opcode VEX.66.0F3A 0x43 - invalid */


/** Opcode VEX.66.0F3A 0x44. */
FNIEMOP_DEF(iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPCLMULQDQ, vpclmulqdq, Vdq_WO, Hdq, Wdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
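    /* Note: imm8 bit 0 selects which quadword of the H operand and bit 4
       which quadword of the W operand enter the carry-less multiply. */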
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
        IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.66.0F3A 0x45 - invalid */


/** Opcode VEX.66.0F3A 0x46 (vex only) */
FNIEMOP_DEF(iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPERM2I128, vperm2i128, Vqq_WO, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ONE);
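    /* Note: the imm8 has the same layout as for vperm2f128 above: bits 1:0
       and 5:4 select the source halves, bits 3 and 7 zero a half. */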
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.66.0F3A 0x47 - invalid */
/** Opcode VEX.66.0F3A 0x48 (AMD tables only). */
FNIEMOP_STUB(iemOp_vperlmilzz2ps_Vx_Hx_Wp_Lx);
/** Opcode VEX.66.0F3A 0x49 (AMD tables only). */
FNIEMOP_STUB(iemOp_vperlmilzz2pd_Vx_Hx_Wp_Lx);


/**
 * Common worker for AVX instructions on the forms:
 *     - vblendvps/d    xmm0, xmm1, xmm2/mem128, xmm4
 *     - vblendvps/d    ymm0, ymm1, ymm2/mem256, ymm4
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operations.
 * Additionally, it triggers \#UD if VEX.W is 1.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
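        /* bOp4 is the 'is4' immediate byte: its bits 7:4 encode the fourth,
           register operand (the blend selector), see IEM_GET_IMM8_REG below. */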
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_LOCAL(RTUINT256U, uSrc3);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
            IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
            IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_LOCAL(RTUINT256U, uSrc3);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
            IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1589 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1590 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1591 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1592 IEM_MC_PREPARE_AVX_USAGE();
1593
1594 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1595 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1596 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1597
1598 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1599 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1600 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1601 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1602 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1603 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1604 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1605 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1606
1607 IEM_MC_ADVANCE_RIP_AND_FINISH();
1608 IEM_MC_END();
1609 }
1610 }
1611}
1612
1613
1614/** Opcode VEX.66.0F3A 0x4a (vex only).
1615 * AVX, AVX */
1616FNIEMOP_DEF(iemOp_vblendvps_Vx_Hx_Wx_Lx)
1617{
1618 IEMOP_MNEMONIC4(VEX_RVMR, VBLENDVPS, vblendvps, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0);
1619 IEMOPBLENDOP_INIT_VARS(vblendvps);
1620 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
1621}
1622
1623
1624/** Opcode VEX.66.0F3A 0x4b (vex only).
1625 * AVX, AVX */
1626FNIEMOP_DEF(iemOp_vblendvpd_Vx_Hx_Wx_Lx)
1627{
1628 IEMOP_MNEMONIC4(VEX_RVMR, VBLENDVPD, vblendvpd, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0);
1629 IEMOPBLENDOP_INIT_VARS(vblendvpd);
1630 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
1631}
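
/*
 * Illustrative sketch (not part of the decoder): the operation behind
 * vblendvps selects each dword element from the first or second source
 * based on the sign bit of the corresponding dword in the fourth (Lx)
 * register operand; vblendvpd does the same at qword granularity.  The
 * SSE4.1 intrinsic below performs the equivalent 128-bit select:
 *
 * @code
 *     #include <immintrin.h>
 *
 *     static __m128 BlendvPsSketch(__m128 uSrc1, __m128 uSrc2, __m128 uMask)
 *     {
 *         // Element i = (uMask[i] sign bit set) ? uSrc2[i] : uSrc1[i].
 *         return _mm_blendv_ps(uSrc1, uSrc2, uMask);
 *     }
 * @endcode
 */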
1632
1633
1634/**
1635 * Common worker for AVX2 instructions on the forms:
1636 * - vpxxx xmm0, xmm1, xmm2/mem128, xmm4
1637 * - vpxxx ymm0, ymm1, ymm2/mem256, ymm4
1638 *
1639 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
1640 * Additionally, VEX.W must be zero.
1641 */
1642FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
1643{
1644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1645 if (IEM_IS_MODRM_REG_MODE(bRm))
1646 {
1647 /*
1648 * Register, register.
1649 */
1650 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1651 if (pVCpu->iem.s.uVexLength)
1652 {
1653 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1654 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx2);
1655 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1656 IEM_MC_PREPARE_AVX_USAGE();
1657
1658 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1659 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1660 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1661
1662 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1663 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1664 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1665
1666 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1667 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1668 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1669
1670 IEM_MC_LOCAL(RTUINT256U, uDst);
1671 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1672
1673 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1674
1675 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1676 IEM_MC_ADVANCE_RIP_AND_FINISH();
1677 IEM_MC_END();
1678 }
1679 else
1680 {
1681 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1682 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1683 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1684 IEM_MC_PREPARE_AVX_USAGE();
1685 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1686 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1687 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1688 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1689 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
1690 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1691 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1692 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1693 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1694 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1695 IEM_MC_ADVANCE_RIP_AND_FINISH();
1696 IEM_MC_END();
1697 }
1698 }
1699 else
1700 {
1701 /*
1702 * Register, memory.
1703 */
1704 if (pVCpu->iem.s.uVexLength)
1705 {
1706 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1708
1709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1710 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1711
1712 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx2);
1713 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1714 IEM_MC_PREPARE_AVX_USAGE();
1715
1716 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1717 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1718 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1719
1720 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1721 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1722 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1723
1724 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1725 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1726 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1727
1728 IEM_MC_LOCAL(RTUINT256U, uDst);
1729 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1730
1731 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1732
1733 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1734 IEM_MC_ADVANCE_RIP_AND_FINISH();
1735 IEM_MC_END();
1736 }
1737 else
1738 {
1739 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1742 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1743
1744 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1745 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1746 IEM_MC_PREPARE_AVX_USAGE();
1747
1748 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1749 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1750 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1751
1752 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1753 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1754 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1755 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1756 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1757 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1758 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1759 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1760
1761 IEM_MC_ADVANCE_RIP_AND_FINISH();
1762 IEM_MC_END();
1763 }
1764 }
1765}
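
/*
 * Note on the fourth operand of these VEX_RVMR forms: the extra register
 * (Lx) rides in bits 7:4 of the trailing immediate byte (the SDM's /is4
 * encoding), which appears to be what IEM_GET_IMM8_REG extracts from bOp4
 * above.  A minimal sketch with a hypothetical helper name:
 *
 * @code
 *     static unsigned Is4RegFromImm8(uint8_t bOp4) // hypothetical helper
 *     {
 *         return (bOp4 >> 4) & 0xf; // XMM0..XMM15 / YMM0..YMM15
 *     }
 * @endcode
 */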
1766
1767
1768/** Opcode VEX.66.0F3A 0x4c (vex only).
1769 * AVX, AVX2 */
1770FNIEMOP_DEF(iemOp_vpblendvb_Vx_Hx_Wx_Lx)
1771{
1772 /** @todo testcase: cover VEX.W=1 and check that it triggers \#UD on both real
1773 * and emulated hardware. */
1774 IEMOP_MNEMONIC4(VEX_RVMR, VPBLENDVB, vpblendvb, Vx_WO, Hx, Wx, Lx, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_W_ZERO);
1775 IEMOPBLENDOP_INIT_VARS(vpblendvb);
1776 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1777}
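
/*
 * Illustrative sketch (not part of the decoder): vpblendvb is the byte
 * granular sibling of vblendvps/vblendvpd above, keying the select on the
 * most significant bit of each byte of the fourth operand.  The SSE4.1
 * intrinsic equivalent of the 128-bit form:
 *
 * @code
 *     #include <immintrin.h>
 *
 *     static __m128i PblendvbSketch(__m128i uSrc1, __m128i uSrc2, __m128i uMask)
 *     {
 *         // Byte i = (uMask byte i bit 7 set) ? uSrc2[i] : uSrc1[i].
 *         return _mm_blendv_epi8(uSrc1, uSrc2, uMask);
 *     }
 * @endcode
 */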
1778
1779
1780/* Opcode VEX.66.0F3A 0x4d - invalid */
1781/* Opcode VEX.66.0F3A 0x4e - invalid */
1782/* Opcode VEX.66.0F3A 0x4f - invalid */
1783
1784
1785/* Opcode VEX.66.0F3A 0x50 - invalid */
1786/* Opcode VEX.66.0F3A 0x51 - invalid */
1787/* Opcode VEX.66.0F3A 0x52 - invalid */
1788/* Opcode VEX.66.0F3A 0x53 - invalid */
1789/* Opcode VEX.66.0F3A 0x54 - invalid */
1790/* Opcode VEX.66.0F3A 0x55 - invalid */
1791/* Opcode VEX.66.0F3A 0x56 - invalid */
1792/* Opcode VEX.66.0F3A 0x57 - invalid */
1793/* Opcode VEX.66.0F3A 0x58 - invalid */
1794/* Opcode VEX.66.0F3A 0x59 - invalid */
1795/* Opcode VEX.66.0F3A 0x5a - invalid */
1796/* Opcode VEX.66.0F3A 0x5b - invalid */
1797/** Opcode VEX.66.0F3A 0x5c (AMD tables only). */
1798FNIEMOP_STUB(iemOp_vfmaddsubps_Vx_Lx_Wx_Hx);
1799/** Opcode VEX.66.0F3A 0x5d (AMD tables only). */
1800FNIEMOP_STUB(iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx);
1801/** Opcode VEX.66.0F3A 0x5e (AMD tables only). */
1802FNIEMOP_STUB(iemOp_vfmsubaddps_Vx_Lx_Wx_Hx);
1803/** Opcode VEX.66.0F3A 0x5f (AMD tables only). */
1804FNIEMOP_STUB(iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx);
1805
1806
1807/**
1808 * @opcode 0x60
1809 * @oppfx 0x66
1810 * @opflmodify cf,pf,af,zf,sf,of
1811 * @opflclear pf,af
1812 */
1813FNIEMOP_DEF(iemOp_vpcmpestrm_Vdq_Wdq_Ib)
1814{
1815 IEMOP_MNEMONIC3(VEX_RMI, VPCMPESTRM, vpcmpestrm, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1816
1817 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1818 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1819 {
1820 if (IEM_IS_MODRM_REG_MODE(bRm))
1821 {
1822 /*
1823 * Register, register.
1824 */
1825 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1826 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
1827 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1828 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1829 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1830 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
1831 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
1832 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1833 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1834 IEM_MC_PREPARE_SSE_USAGE();
1835 IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
1836 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
1837 IEM_MC_REF_EFLAGS(pEFlags);
1838 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
1839 iemAImpl_vpcmpestrm_u128,
1840 iemAImpl_vpcmpestrm_u128_fallback),
1841 puDst, pEFlags, pSrc, bImmArg);
1842 IEM_MC_ADVANCE_RIP_AND_FINISH();
1843 IEM_MC_END();
1844 }
1845 else
1846 {
1847 /*
1848 * Register, memory.
1849 */
1850 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
1851 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1852 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1853 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
1854 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
1855 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1856
1857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1858 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1859 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1860 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1861 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1862 IEM_MC_PREPARE_SSE_USAGE();
1863
1864 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
1865 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1866 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
1867 IEM_MC_REF_EFLAGS(pEFlags);
1868 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
1869 iemAImpl_vpcmpestrm_u128,
1870 iemAImpl_vpcmpestrm_u128_fallback),
1871 puDst, pEFlags, pSrc, bImmArg);
1872 IEM_MC_ADVANCE_RIP_AND_FINISH();
1873 IEM_MC_END();
1874 }
1875 }
1876 else
1877 {
1878 if (IEM_IS_MODRM_REG_MODE(bRm))
1879 {
1880 /*
1881 * Register, register.
1882 */
1883 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1884 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1885 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1886 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1887 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1888 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
1889 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
1890 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1891 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1892 IEM_MC_PREPARE_SSE_USAGE();
1893 IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
1894 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
1895 IEM_MC_REF_EFLAGS(pEFlags);
1896 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
1897 iemAImpl_vpcmpestrm_u128,
1898 iemAImpl_vpcmpestrm_u128_fallback),
1899 puDst, pEFlags, pSrc, bImmArg);
1900 IEM_MC_ADVANCE_RIP_AND_FINISH();
1901 IEM_MC_END();
1902 }
1903 else
1904 {
1905 /*
1906 * Register, memory.
1907 */
1908 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1909 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1910 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1911 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
1912 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
1913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1914
1915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1916 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1917 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1918 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1919 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1920 IEM_MC_PREPARE_SSE_USAGE();
1921
1922 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
1923 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1924 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
1925 IEM_MC_REF_EFLAGS(pEFlags);
1926 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
1927 iemAImpl_vpcmpestrm_u128,
1928 iemAImpl_vpcmpestrm_u128_fallback),
1929 puDst, pEFlags, pSrc, bImmArg);
1930 IEM_MC_ADVANCE_RIP_AND_FINISH();
1931 IEM_MC_END();
1932 }
1933 }
1934}
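
/*
 * Illustrative sketch (not part of the decoder): vpcmpestrm compares two
 * explicit-length strings whose lengths come from xAX and xDX - the VEX.W
 * split above only decides whether the full 64-bit registers or the
 * sign-extended 32-bit halves are fetched - and deposits the resulting
 * mask in XMM0.  The SSE4.2 intrinsic equivalent of the 128-bit
 * operation, assuming unsigned byte elements and equal-each comparison:
 *
 * @code
 *     #include <immintrin.h>
 *
 *     static __m128i CmpestrmSketch(__m128i uStr1, int cch1, __m128i uStr2, int cch2)
 *     {
 *         return _mm_cmpestrm(uStr1, cch1, uStr2, cch2,
 *                             _SIDD_UBYTE_OPS | _SIDD_CMP_EQUAL_EACH | _SIDD_BIT_MASK);
 *     }
 * @endcode
 */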
1935
1936
1937/**
1938 * @opcode 0x61
1939 * @oppfx 0x66
1940 * @opflmodify cf,pf,af,zf,sf,of
1941 * @opflclear pf,af
1942 */
1943FNIEMOP_DEF(iemOp_vpcmpestri_Vdq_Wdq_Ib)
1944{
1945 IEMOP_MNEMONIC3(VEX_RMI, VPCMPESTRI, vpcmpestri, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1946
1947 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1948 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1949 {
1950 if (IEM_IS_MODRM_REG_MODE(bRm))
1951 {
1952 /*
1953 * Register, register.
1954 */
1955 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1956 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
1957 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1958 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
1959 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1960 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
1961 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
1962 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1963 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1964 IEM_MC_PREPARE_SSE_USAGE();
1965 IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
1966 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
1967 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
1968 IEM_MC_REF_EFLAGS(pEFlags);
1969 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
1970 iemAImpl_vpcmpestri_u128,
1971 iemAImpl_vpcmpestri_u128_fallback),
1972 pu32Ecx, pEFlags, pSrc, bImmArg);
1973 /** @todo testcase: High dword of RCX cleared? */
1974 IEM_MC_ADVANCE_RIP_AND_FINISH();
1975 IEM_MC_END();
1976 }
1977 else
1978 {
1979 /*
1980 * Register, memory.
1981 */
1982 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
1983 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
1984 IEM_MC_ARG(uint32_t *, pEFlags, 1);
1985 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
1986 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
1987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1988
1989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1990 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1991 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1992 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1993 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1994 IEM_MC_PREPARE_SSE_USAGE();
1995
1996 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
1997 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1998 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
1999 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2000 IEM_MC_REF_EFLAGS(pEFlags);
2001 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2002 iemAImpl_vpcmpestri_u128,
2003 iemAImpl_vpcmpestri_u128_fallback),
2004 pu32Ecx, pEFlags, pSrc, bImmArg);
2005 /** @todo testcase: High dword of RCX cleared? */
2006 IEM_MC_ADVANCE_RIP_AND_FINISH();
2007 IEM_MC_END();
2008 }
2009 }
2010 else
2011 {
2012 if (IEM_IS_MODRM_REG_MODE(bRm))
2013 {
2014 /*
2015 * Register, register.
2016 */
2017 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2018 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2019 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2020 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
2021 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2022 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2023 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2024 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2025 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2026 IEM_MC_PREPARE_SSE_USAGE();
2027 IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2028 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
2029 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2030 IEM_MC_REF_EFLAGS(pEFlags);
2031 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2032 iemAImpl_vpcmpestri_u128,
2033 iemAImpl_vpcmpestri_u128_fallback),
2034 pu32Ecx, pEFlags, pSrc, bImmArg);
2035 /** @todo testcase: High dword of RCX cleared? */
2036 IEM_MC_ADVANCE_RIP_AND_FINISH();
2037 IEM_MC_END();
2038 }
2039 else
2040 {
2041 /*
2042 * Register, memory.
2043 */
2044 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2045 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
2046 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2047 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2048 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2050
2051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2052 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2053 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2054 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2055 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2056 IEM_MC_PREPARE_SSE_USAGE();
2057
2058 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
2059 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2060 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
2061 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2062 IEM_MC_REF_EFLAGS(pEFlags);
2063 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2064 iemAImpl_vpcmpestri_u128,
2065 iemAImpl_vpcmpestri_u128_fallback),
2066 pu32Ecx, pEFlags, pSrc, bImmArg);
2067 /** @todo testcase: High dword of RCX cleared? */
2068 IEM_MC_ADVANCE_RIP_AND_FINISH();
2069 IEM_MC_END();
2070 }
2071 }
2072}
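
/*
 * Illustrative sketch (not part of the decoder): vpcmpestri is the same
 * explicit-length comparison as vpcmpestrm, but it returns an element
 * index in ECX instead of a mask in XMM0:
 *
 * @code
 *     #include <immintrin.h>
 *
 *     static int CmpestriSketch(__m128i uStr1, int cch1, __m128i uStr2, int cch2)
 *     {
 *         // Returns the index of the first matching byte (16 if none).
 *         return _mm_cmpestri(uStr1, cch1, uStr2, cch2,
 *                             _SIDD_UBYTE_OPS | _SIDD_CMP_EQUAL_EACH);
 *     }
 * @endcode
 */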
2073
2074
2075/**
2076 * @opcode 0x62
2077 * @oppfx 0x66
2078 * @opflmodify cf,pf,af,zf,sf,of
2079 * @opflclear pf,af
2080 */
2081FNIEMOP_DEF(iemOp_vpcmpistrm_Vdq_Wdq_Ib)
2082{
2083 IEMOP_MNEMONIC3(VEX_RMI, VPCMPISTRM, vpcmpistrm, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2084
2085 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2086 if (IEM_IS_MODRM_REG_MODE(bRm))
2087 {
2088 /*
2089 * Register, register.
2090 */
2091 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2092 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2093 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2094 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2095 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2096 IEM_MC_LOCAL(IEMPCMPISTRXSRC, Src);
2097 IEM_MC_ARG_LOCAL_REF(PIEMPCMPISTRXSRC, pSrc, Src, 2);
2098 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2099 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2100 IEM_MC_PREPARE_SSE_USAGE();
2101 IEM_MC_FETCH_XREG_PAIR_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2102 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2103 IEM_MC_REF_EFLAGS(pEFlags);
2104 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2105 iemAImpl_vpcmpistrm_u128,
2106 iemAImpl_vpcmpistrm_u128_fallback),
2107 puDst, pEFlags, pSrc, bImmArg);
2108 IEM_MC_ADVANCE_RIP_AND_FINISH();
2109 IEM_MC_END();
2110 }
2111 else
2112 {
2113 /*
2114 * Register, memory.
2115 */
2116 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2117 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2118 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2119 IEM_MC_LOCAL(IEMPCMPISTRXSRC, Src);
2120 IEM_MC_ARG_LOCAL_REF(PIEMPCMPISTRXSRC, pSrc, Src, 2);
2121 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2122
2123 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2124 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2125 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2126 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2127 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2128 IEM_MC_PREPARE_SSE_USAGE();
2129
2130 IEM_MC_FETCH_MEM_U128_AND_XREG_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2131 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2132 IEM_MC_REF_EFLAGS(pEFlags);
2133 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2134 iemAImpl_vpcmpistrm_u128,
2135 iemAImpl_vpcmpistrm_u128_fallback),
2136 puDst, pEFlags, pSrc, bImmArg);
2137 IEM_MC_ADVANCE_RIP_AND_FINISH();
2138 IEM_MC_END();
2139 }
2140}
2141
2142
2143/**
2144 * @opcode 0x63
2145 * @oppfx 0x66
2146 * @opflmodify cf,pf,af,zf,sf,of
2147 * @opflclear pf,af
2148 */
2149FNIEMOP_DEF(iemOp_vpcmpistri_Vdq_Wdq_Ib)
2150{
2151 IEMOP_MNEMONIC3(VEX_RMI, VPCMPISTRI, vpcmpistri, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2152
2153 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2154 if (IEM_IS_MODRM_REG_MODE(bRm))
2155 {
2156 /*
2157 * Register, register.
2158 */
2159 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2160 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2161 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2162 IEM_MC_ARG(uint32_t *, pEFlags, 0);
2163 IEM_MC_ARG(PCRTUINT128U, pSrc1, 1);
2164 IEM_MC_ARG(PCRTUINT128U, pSrc2, 2);
2165 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2166 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2167 IEM_MC_PREPARE_SSE_USAGE();
2168 IEM_MC_REF_XREG_U128_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2169 IEM_MC_REF_XREG_U128_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2170 IEM_MC_REF_EFLAGS(pEFlags);
2171 IEM_MC_CALL_AIMPL_4(uint32_t, u32Ecx,
2172 IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2173 iemAImpl_vpcmpistri_u128,
2174 iemAImpl_vpcmpistri_u128_fallback),
2175 pEFlags, pSrc1, pSrc2, bImmArg);
2176 /** @todo testcase: High dword of RCX cleared? */
2177 IEM_MC_STORE_GREG_U32(X86_GREG_xCX, u32Ecx);
2178 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2179
2180 IEM_MC_ADVANCE_RIP_AND_FINISH();
2181 IEM_MC_END();
2182 }
2183 else
2184 {
2185 /*
2186 * Register, memory.
2187 */
2188 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2189 IEM_MC_ARG(uint32_t *, pEFlags, 0);
2190 IEM_MC_ARG(PCRTUINT128U, pSrc1, 1);
2191 IEM_MC_LOCAL(RTUINT128U, Src2);
2192 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc2, Src2, 2);
2193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2194
2195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2196 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2197 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2198 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2199 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2200 IEM_MC_PREPARE_SSE_USAGE();
2201
2202 IEM_MC_FETCH_MEM_U128(Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2203 IEM_MC_REF_XREG_U128_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2204 IEM_MC_REF_EFLAGS(pEFlags);
2205 IEM_MC_CALL_AIMPL_4(uint32_t, u32Ecx,
2206 IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2207 iemAImpl_vpcmpistri_u128,
2208 iemAImpl_vpcmpistri_u128_fallback),
2209 pEFlags, pSrc1, pSrc2, bImmArg);
2210 /** @todo testcase: High dword of RCX cleared? */
2211 IEM_MC_STORE_GREG_U32(X86_GREG_xCX, u32Ecx);
2212 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2213 IEM_MC_ADVANCE_RIP_AND_FINISH();
2214 IEM_MC_END();
2215 }
2216}
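
/*
 * Illustrative sketch (not part of the decoder): vpcmpistrm and vpcmpistri
 * above are the implicit-length variants - each string ends at its first
 * zero element rather than at an xAX/xDX count, so no general registers
 * are fetched:
 *
 * @code
 *     #include <immintrin.h>
 *
 *     static int CmpistriSketch(__m128i uStr1, __m128i uStr2)
 *     {
 *         // Returns the index of the first matching byte (16 if none).
 *         return _mm_cmpistri(uStr1, uStr2, _SIDD_UBYTE_OPS | _SIDD_CMP_EQUAL_EACH);
 *     }
 * @endcode
 */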
2217
2218
2219/* Opcode VEX.66.0F3A 0x64 - invalid */
2220/* Opcode VEX.66.0F3A 0x65 - invalid */
2221/* Opcode VEX.66.0F3A 0x66 - invalid */
2222/* Opcode VEX.66.0F3A 0x67 - invalid */
2223/** Opcode VEX.66.0F3A 0x68 (AMD tables only). */
2224FNIEMOP_STUB(iemOp_vfmaddps_Vx_Lx_Wx_Hx);
2225/** Opcode VEX.66.0F3A 0x69 (AMD tables only). */
2226FNIEMOP_STUB(iemOp_vfmaddpd_Vx_Lx_Wx_Hx);
2227/** Opcode VEX.66.0F3A 0x6a (AMD tables only). */
2228FNIEMOP_STUB(iemOp_vfmaddss_Vx_Lx_Wx_Hx);
2229/** Opcode VEX.66.0F3A 0x6b (AMD tables only). */
2230FNIEMOP_STUB(iemOp_vfmaddsd_Vx_Lx_Wx_Hx);
2231/** Opcode VEX.66.0F3A 0x6c (AMD tables only). */
2232FNIEMOP_STUB(iemOp_vfmsubps_Vx_Lx_Wx_Hx);
2233/** Opcode VEX.66.0F3A 0x6d (AMD tables only). */
2234FNIEMOP_STUB(iemOp_vfmsubpd_Vx_Lx_Wx_Hx);
2235/** Opcode VEX.66.0F3A 0x6e (AMD tables only). */
2236FNIEMOP_STUB(iemOp_vfmsubss_Vx_Lx_Wx_Hx);
2237/** Opcode VEX.66.0F3A 0x6f (AMD tables only). */
2238FNIEMOP_STUB(iemOp_vfmsubsd_Vx_Lx_Wx_Hx);
2239
2240/* Opcode VEX.66.0F3A 0x70 - invalid */
2241/* Opcode VEX.66.0F3A 0x71 - invalid */
2242/* Opcode VEX.66.0F3A 0x72 - invalid */
2243/* Opcode VEX.66.0F3A 0x73 - invalid */
2244/* Opcode VEX.66.0F3A 0x74 - invalid */
2245/* Opcode VEX.66.0F3A 0x75 - invalid */
2246/* Opcode VEX.66.0F3A 0x76 - invalid */
2247/* Opcode VEX.66.0F3A 0x77 - invalid */
2248/** Opcode VEX.66.0F3A 0x78 (AMD tables only). */
2249FNIEMOP_STUB(iemOp_vfnmaddps_Vx_Lx_Wx_Hx);
2250/** Opcode VEX.66.0F3A 0x79 (AMD tables only). */
2251FNIEMOP_STUB(iemOp_vfnmaddpd_Vx_Lx_Wx_Hx);
2252/** Opcode VEX.66.0F3A 0x7a (AMD tables only). */
2253FNIEMOP_STUB(iemOp_vfnmaddss_Vx_Lx_Wx_Hx);
2254/** Opcode VEX.66.0F3A 0x7b (AMD tables only). */
2255FNIEMOP_STUB(iemOp_vfnmaddsd_Vx_Lx_Wx_Hx);
2256/** Opcode VEX.66.0F3A 0x7c (AMD tables only). */
2257FNIEMOP_STUB(iemOp_vfnmsubps_Vx_Lx_Wx_Hx);
2258/** Opcode VEX.66.0F3A 0x7d (AMD tables only). */
2259FNIEMOP_STUB(iemOp_vfnmsubpd_Vx_Lx_Wx_Hx);
2260/** Opcode VEX.66.0F3A 0x7e (AMD tables only). */
2261FNIEMOP_STUB(iemOp_vfnmsubss_Vx_Lx_Wx_Hx);
2262/** Opcode VEX.66.0F3A 0x7f (AMD tables only). */
2263FNIEMOP_STUB(iemOp_vfnmsubsd_Vx_Lx_Wx_Hx);
2264
2265 /* Opcodes VEX.66.0F3A 0x80 thru 0xbf are invalid. */
2266
2267
2268 /* Opcode VEX.66.0F3A 0xc0 - invalid */
2269 /* Opcode VEX.66.0F3A 0xc1 - invalid */
2270 /* Opcode VEX.66.0F3A 0xc2 - invalid */
2271 /* Opcode VEX.66.0F3A 0xc3 - invalid */
2272 /* Opcode VEX.66.0F3A 0xc4 - invalid */
2273 /* Opcode VEX.66.0F3A 0xc5 - invalid */
2274 /* Opcode VEX.66.0F3A 0xc6 - invalid */
2275 /* Opcode VEX.66.0F3A 0xc7 - invalid */
2276 /* Opcode VEX.66.0F3A 0xc8 - invalid */
2277 /* Opcode VEX.66.0F3A 0xc9 - invalid */
2278 /* Opcode VEX.66.0F3A 0xca - invalid */
2279 /* Opcode VEX.66.0F3A 0xcb - invalid */
2280 /* Opcode VEX.66.0F3A 0xcc - invalid */
2281 /* Opcode VEX.66.0F3A 0xcd - invalid */
2282 /* Opcode VEX.66.0F3A 0xce - invalid */
2283 /* Opcode VEX.66.0F3A 0xcf - invalid */
2284
2285
2286/* Opcode VEX.66.0F3A 0xd0 - invalid */
2287/* Opcode VEX.66.0F3A 0xd1 - invalid */
2288/* Opcode VEX.66.0F3A 0xd2 - invalid */
2289/* Opcode VEX.66.0F3A 0xd3 - invalid */
2290/* Opcode VEX.66.0F3A 0xd4 - invalid */
2291/* Opcode VEX.66.0F3A 0xd5 - invalid */
2292/* Opcode VEX.66.0F3A 0xd6 - invalid */
2293/* Opcode VEX.66.0F3A 0xd7 - invalid */
2294/* Opcode VEX.66.0F3A 0xd8 - invalid */
2295/* Opcode VEX.66.0F3A 0xd9 - invalid */
2296/* Opcode VEX.66.0F3A 0xda - invalid */
2297/* Opcode VEX.66.0F3A 0xdb - invalid */
2298/* Opcode VEX.66.0F3A 0xdc - invalid */
2299/* Opcode VEX.66.0F3A 0xdd - invalid */
2300/* Opcode VEX.66.0F3A 0xde - invalid */
2301/* Opcode VEX.66.0F3A 0xdf - (aeskeygenassist). */
2302FNIEMOP_STUB(iemOp_vaeskeygen_Vdq_Wdq_Ib);
2303
2304
2305/**
2306 * @opcode 0xf0
2307 * @oppfx 0xf2
2308 * @opflclass unchanged
2309 */
2310FNIEMOP_DEF(iemOp_rorx_Gy_Ey_Ib)
2311{
2312 IEMOP_MNEMONIC3(VEX_RMI, RORX, rorx, Gy, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO);
2313 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2315 if (IEM_IS_MODRM_REG_MODE(bRm))
2316 {
2317 /*
2318 * Register, register.
2319 */
2320 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
2321 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2322 {
2323 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2324 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2325 IEM_MC_ARG(uint64_t *, pDst, 0);
2326 IEM_MC_ARG(uint64_t, uSrc1, 1);
2327 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
2328 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
2329 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2330 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
2331 IEM_MC_ADVANCE_RIP_AND_FINISH();
2332 IEM_MC_END();
2333 }
2334 else
2335 {
2336 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2337 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2338 IEM_MC_ARG(uint32_t *, pDst, 0);
2339 IEM_MC_ARG(uint32_t, uSrc1, 1);
2340 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
2341 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
2342 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2343 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
2344 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2345 IEM_MC_ADVANCE_RIP_AND_FINISH();
2346 IEM_MC_END();
2347 }
2348 }
2349 else
2350 {
2351 /*
2352 * Register, memory.
2353 */
2354 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2355 {
2356 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2357 IEM_MC_ARG(uint64_t *, pDst, 0);
2358 IEM_MC_ARG(uint64_t, uSrc1, 1);
2359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2360 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2361 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
2362 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
2363 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2364 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2365 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2366 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
2367 IEM_MC_ADVANCE_RIP_AND_FINISH();
2368 IEM_MC_END();
2369 }
2370 else
2371 {
2372 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2373 IEM_MC_ARG(uint32_t *, pDst, 0);
2374 IEM_MC_ARG(uint32_t, uSrc1, 1);
2375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2377 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
2378 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
2379 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2380 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2381 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2382 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
2383 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2384 IEM_MC_ADVANCE_RIP_AND_FINISH();
2385 IEM_MC_END();
2386 }
2387 }
2388}
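
/*
 * Illustrative sketch (not part of the decoder): rorx is the BMI2 rotate
 * right by an immediate that, unlike ror, leaves EFLAGS untouched (hence
 * the @opflclass unchanged annotation above).  The 32-bit operation boils
 * down to:
 *
 * @code
 *     static uint32_t RorxU32Sketch(uint32_t uSrc, uint8_t bImm8)
 *     {
 *         unsigned const cShift = bImm8 & 31;
 *         return cShift ? (uSrc >> cShift) | (uSrc << (32 - cShift)) : uSrc;
 *     }
 * @endcode
 */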
2389
2390
2391/**
2392 * VEX opcode map \#3.
2393 *
2394 * @sa g_apfnThreeByte0f3a
2395 */
2396const PFNIEMOP g_apfnVexMap3[] =
2397{
2398 /* no prefix, 066h prefix f3h prefix, f2h prefix */
2399 /* 0x00 */ iemOp_InvalidNeedRMImm8, iemOp_vpermq_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2400 /* 0x01 */ iemOp_InvalidNeedRMImm8, iemOp_vpermpd_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2401 /* 0x02 */ iemOp_InvalidNeedRMImm8, iemOp_vpblendd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2402 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2403 /* 0x04 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2404 /* 0x05 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2405 /* 0x06 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2406 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2407 /* 0x08 */ iemOp_InvalidNeedRMImm8, iemOp_vroundps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2408 /* 0x09 */ iemOp_InvalidNeedRMImm8, iemOp_vroundpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2409 /* 0x0a */ iemOp_InvalidNeedRMImm8, iemOp_vroundss_Vss_Wss_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2410 /* 0x0b */ iemOp_InvalidNeedRMImm8, iemOp_vroundsd_Vsd_Wsd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2411 /* 0x0c */ iemOp_InvalidNeedRMImm8, iemOp_vblendps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2412 /* 0x0d */ iemOp_InvalidNeedRMImm8, iemOp_vblendpd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2413 /* 0x0e */ iemOp_InvalidNeedRMImm8, iemOp_vpblendw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2414 /* 0x0f */ iemOp_InvalidNeedRMImm8, iemOp_vpalignr_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2415
2416 /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2417 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2418 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2419 /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2420 /* 0x14 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrb_Eb_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2421 /* 0x15 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrw_Ew_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2422 /* 0x16 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrd_q_Ey_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2423 /* 0x17 */ iemOp_InvalidNeedRMImm8, iemOp_vextractps_Ed_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2424 /* 0x18 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2425 /* 0x19 */ iemOp_InvalidNeedRMImm8, iemOp_vextractf128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2426 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2427 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2428 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2429 /* 0x1d */ iemOp_InvalidNeedRMImm8, iemOp_vcvtps2ph_Wx_Vx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2430 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2431 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2432
2433 /* 0x20 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2434 /* 0x21 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2435 /* 0x22 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2436 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2437 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2438 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2439 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2440 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2441 /* 0x28 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2442 /* 0x29 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2443 /* 0x2a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2444 /* 0x2b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2445 /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2446 /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2447 /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2448 /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2449
2450 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2451 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2452 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2453 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2454 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2455 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2456 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2457 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2458 /* 0x38 */ iemOp_InvalidNeedRMImm8, iemOp_vinserti128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2459 /* 0x39 */ iemOp_InvalidNeedRMImm8, iemOp_vextracti128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2460 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2461 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2462 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2463 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2464 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2465 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2466
2467 /* 0x40 */ iemOp_InvalidNeedRMImm8, iemOp_vdpps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2468 /* 0x41 */ iemOp_InvalidNeedRMImm8, iemOp_vdppd_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2469 /* 0x42 */ iemOp_InvalidNeedRMImm8, iemOp_vmpsadbw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2470 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2471 /* 0x44 */ iemOp_InvalidNeedRMImm8, iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2472 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2473 /* 0x46 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2474 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2475 /* 0x48 */ iemOp_InvalidNeedRMImm8, iemOp_vperlmilzz2ps_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2476 /* 0x49 */ iemOp_InvalidNeedRMImm8, iemOp_vperlmilzz2pd_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2477 /* 0x4a */ iemOp_InvalidNeedRMImm8, iemOp_vblendvps_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2478 /* 0x4b */ iemOp_InvalidNeedRMImm8, iemOp_vblendvpd_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2479 /* 0x4c */ iemOp_InvalidNeedRMImm8, iemOp_vpblendvb_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2480 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2481 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2482 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2483
2484 /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2485 /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2486 /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2487 /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2488 /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2489 /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2490 /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2491 /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2492 /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2493 /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2494 /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2495 /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2496 /* 0x5c */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2497 /* 0x5d */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2498 /* 0x5e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2499 /* 0x5f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2500
2501 /* 0x60 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2502 /* 0x61 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2503 /* 0x62 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2504 /* 0x63 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2505 /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2506 /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2507 /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2508 /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2509 /* 0x68 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2510 /* 0x69 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2511 /* 0x6a */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2512 /* 0x6b */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2513 /* 0x6c */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2514 /* 0x6d */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2515 /* 0x6e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2516 /* 0x6f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2517
2518 /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2519 /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2520 /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2521 /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2522 /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2523 /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2524 /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2525 /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2526 /* 0x78 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2527 /* 0x79 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2528 /* 0x7a */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2529 /* 0x7b */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2530 /* 0x7c */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2531 /* 0x7d */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2532 /* 0x7e */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2533 /* 0x7f */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2534
2535 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2536 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2537 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2538 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2539 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2540 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2541 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2542 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2543 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2544 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2545 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2546 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2547 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2548 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2549 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2550 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2551
2552 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2553 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2554 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2555 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2556 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2557 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2558 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2559 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2560 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2561 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2562 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2563 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2564 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2565 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2566 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2567 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2568
2569 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2570 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2571 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2572 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2573 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2574 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2575 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2576 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2577 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2578 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2579 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2580 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2581 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2582 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2583 /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2584 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2585
2586 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2587 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2588 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2589 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2590 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2591 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2592 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2593 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2594 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2595 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2596 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2597 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2598 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2599 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2600 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2601 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2602
2603 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2604 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2605 /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2606 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2607 /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2608 /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2609 /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2610 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2611 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2612 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2613 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2614 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2615 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2616 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2617 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2618 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2619
2620 /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2621 /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2622 /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2623 /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2624 /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2625 /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2626 /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2627 /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2628 /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2629 /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2630 /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2631 /* 0xdb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2632 /* 0xdc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2633 /* 0xdd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2634 /* 0xde */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2635 /* 0xdf */ iemOp_vaeskeygen_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2636
2637 /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2638 /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2639 /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2640 /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2641 /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2642 /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2643 /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2644 /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2645 /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2646 /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2647 /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2648 /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2649 /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2650 /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2651 /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2652 /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2653
2654 /* 0xf0 */ iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_rorx_Gy_Ey_Ib,
2655 /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2656 /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2657 /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2658 /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2659 /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2660 /* 0xf6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2661 /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2662 /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2663 /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2664 /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2665 /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2666 /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2667 /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2668 /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2669 /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2670};
2671AssertCompile(RT_ELEMENTS(g_apfnVexMap3) == 1024);
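
/*
 * Indexing sketch (an assumption about the dispatcher, which lives
 * elsewhere in IEM): the four-column layout above implies four entries per
 * opcode byte, one per SIMD prefix, which the AssertCompile pins at
 * 256 * 4 = 1024:
 *
 * @code
 *     // idxPrefix: 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 (assumed order)
 *     PFNIEMOP const pfn = g_apfnVexMap3[(uintptr_t)bOpcode * 4 + idxPrefix];
 * @endcode
 */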
2672
2673/** @} */
2674