VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h@105284

Last change on this file since 105284 was 105283, checked in by vboxsync, 7 months ago

VMM/IEM: Implement vcmpps, vcmppd, vcmpss, vcmpsd instruction emulations, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 254.7 KB
1/* $Id: IEMAllInstVexMap1.cpp.h 105283 2024-07-11 20:26:27Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 1
33 * @{
34 */
35
36/**
37 * Common worker for AVX/AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128
39 * - vpxxx ymm0, ymm1, ymm2/mem256
40 *
41 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
42 */
43FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
44{
45 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
46 if (IEM_IS_MODRM_REG_MODE(bRm))
47 {
48 /*
49 * Register, register.
50 */
51 if (pVCpu->iem.s.uVexLength)
52 {
53 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
54 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
55 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
56 IEM_MC_PREPARE_AVX_USAGE();
57
58 IEM_MC_LOCAL(X86YMMREG, uSrc1);
59 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
60 IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
61 IEM_MC_LOCAL(X86YMMREG, uSrc2);
62 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
63 IEM_MC_FETCH_YREG_YMM(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
64 IEM_MC_LOCAL(X86YMMREG, uDst);
65 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
66 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
67 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
68 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
69 IEM_MC_ADVANCE_RIP_AND_FINISH();
70 IEM_MC_END();
71 }
72 else
73 {
74 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
75 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
76 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
77 IEM_MC_PREPARE_AVX_USAGE();
78
79 IEM_MC_LOCAL(X86XMMREG, uDst);
80 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
81 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
82 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
83 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
84 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
85 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
86 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
87 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
88 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
89 IEM_MC_ADVANCE_RIP_AND_FINISH();
90 IEM_MC_END();
91 }
92 }
93 else
94 {
95 /*
96 * Register, memory.
97 */
98 if (pVCpu->iem.s.uVexLength)
99 {
100 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
103 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
104 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
105 IEM_MC_PREPARE_AVX_USAGE();
106
107 IEM_MC_LOCAL(X86YMMREG, uSrc2);
108 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
109 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
110 IEM_MC_LOCAL(X86YMMREG, uSrc1);
111 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
112 IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
113 IEM_MC_LOCAL(X86YMMREG, uDst);
114 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
115 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
116 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
117 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
118 IEM_MC_ADVANCE_RIP_AND_FINISH();
119 IEM_MC_END();
120 }
121 else
122 {
123 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
126 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
127 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
128 IEM_MC_PREPARE_AVX_USAGE();
129
130 IEM_MC_LOCAL(X86XMMREG, uDst);
131 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
132 IEM_MC_LOCAL(X86XMMREG, uSrc2);
133 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
134 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
135 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
136 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
137
138 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
139 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
140 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
141 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
142 IEM_MC_ADVANCE_RIP_AND_FINISH();
143 IEM_MC_END();
144 }
145 }
146}
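/*
 * Usage sketch (editorial addition, not upstream code): opcode handlers
 * dispatch to the worker above through a function table, following the
 * IEM_SELECT_HOST_OR_FALLBACK pattern used by the vunpcklps handler later
 * in this file.  The IEMOPMEDIAF3_INIT_VARS macro and the vaddps handler
 * name are assumptions by analogy with IEMOPMEDIAOPTF3_INIT_VARS below.
 */
#if 0 /* illustrative sketch only */
FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
{
    IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAF3_INIT_VARS(vaddps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}
#endif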
147
148
149/**
150 * Common worker for scalar AVX instructions (vaddss, vsubss, etc.) on the form:
151 * - vxxxss xmm0, xmm1, xmm2/mem32
152 *
153 * Exceptions type 4. AVX cpuid check for 128-bit operation.
154 * Ignores VEX.L, from SDM:
155 * Software should ensure VADDSS is encoded with VEX.L=0.
156 * Encoding VADDSS with VEX.L=1 may encounter unpredictable behavior
157 * across different processor generations.
158 */
159FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R32, PFNIEMAIMPLFPAVXF3U128R32, pfnU128)
160{
161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
162 if (IEM_IS_MODRM_REG_MODE(bRm))
163 {
164 /*
165 * Register, register.
166 */
167 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
168 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
169 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
170 IEM_MC_PREPARE_AVX_USAGE();
171
172 IEM_MC_LOCAL(X86XMMREG, uDst);
173 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
174 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
175 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
176 IEM_MC_ARG(PCRTFLOAT32U, pr32Src2, 2);
177 IEM_MC_REF_XREG_R32_CONST(pr32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
178 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
179 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
180 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
181 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
182 IEM_MC_ADVANCE_RIP_AND_FINISH();
183 IEM_MC_END();
184 }
185 else
186 {
187 /*
188 * Register, memory.
189 */
190 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
193 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
194 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
195 IEM_MC_PREPARE_AVX_USAGE();
196
197 IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
198 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
199 IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
200 IEM_MC_LOCAL(X86XMMREG, uDst);
201 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
202 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
203 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
204 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
205 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
206 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
207 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
208 IEM_MC_ADVANCE_RIP_AND_FINISH();
209 IEM_MC_END();
210 }
211}
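/*
 * Usage sketch (editorial addition): scalar handlers pass the 128-bit
 * assembly helper directly, since there is no 256-bit form.  The
 * iemAImpl_vaddss_u128_r32* helper names are assumed for illustration.
 */
#if 0 /* illustrative sketch only */
FNIEMOP_DEF(iemOp_vaddss_Vss_Hss_Wss)
{
    IEMOP_MNEMONIC3(VEX_RVM, VADDSS, vaddss, Vss, Hss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback));
}
#endif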
212
213
214/**
215 * Common worker for scalar AVX instructions (vaddsd, vsubsd, etc.) on the form:
216 * - vxxxsd xmm0, xmm1, xmm2/mem64
217 *
218 * Exceptions type 4. AVX cpuid check for 128-bit operation.
219 * Ignores VEX.L, from SDM:
220 * Software should ensure VADDSD is encoded with VEX.L=0.
221 * Encoding VADDSD with VEX.L=1 may encounter unpredictable behavior
222 * across different processor generations.
223 */
224FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R64, PFNIEMAIMPLFPAVXF3U128R64, pfnU128)
225{
226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
227 if (IEM_IS_MODRM_REG_MODE(bRm))
228 {
229 /*
230 * Register, register.
231 */
232 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
233 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
234 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
235 IEM_MC_PREPARE_AVX_USAGE();
236
237 IEM_MC_LOCAL(X86XMMREG, uDst);
238 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
239 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
240 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
241 IEM_MC_ARG(PCRTFLOAT64U, pr64Src2, 2);
242 IEM_MC_REF_XREG_R64_CONST(pr64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
243 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
244 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
245 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
246 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
247 IEM_MC_ADVANCE_RIP_AND_FINISH();
248 IEM_MC_END();
249 }
250 else
251 {
252 /*
253 * Register, memory.
254 */
255 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
256 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
258 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
259 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
260 IEM_MC_PREPARE_AVX_USAGE();
261
262 IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
263 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
264 IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
265 IEM_MC_LOCAL(X86XMMREG, uDst);
266 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
267 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
268 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
269 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
270 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
271 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
272 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
273 IEM_MC_ADVANCE_RIP_AND_FINISH();
274 IEM_MC_END();
275 }
276}
277
278
279/**
280 * Common worker for AVX/AVX2 instructions on the forms:
281 * - vpxxx xmm0, xmm1, xmm2/mem128
282 * - vpxxx ymm0, ymm1, ymm2/mem256
283 *
284 * Takes a function table for functions w/o an implicit state parameter.
285 *
286 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
287 */
288FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
289{
290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
291 if (IEM_IS_MODRM_REG_MODE(bRm))
292 {
293 /*
294 * Register, register.
295 */
296 if (pVCpu->iem.s.uVexLength)
297 {
298 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
299 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
300 IEM_MC_LOCAL(RTUINT256U, uDst);
301 IEM_MC_LOCAL(RTUINT256U, uSrc1);
302 IEM_MC_LOCAL(RTUINT256U, uSrc2);
303 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
304 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
305 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
306 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
307 IEM_MC_PREPARE_AVX_USAGE();
308 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
309 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
310 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
311 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
312 IEM_MC_ADVANCE_RIP_AND_FINISH();
313 IEM_MC_END();
314 }
315 else
316 {
317 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
318 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
319 IEM_MC_ARG(PRTUINT128U, puDst, 0);
320 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
321 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
322 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
323 IEM_MC_PREPARE_AVX_USAGE();
324 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
325 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
326 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
327 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
328 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
329 IEM_MC_ADVANCE_RIP_AND_FINISH();
330 IEM_MC_END();
331 }
332 }
333 else
334 {
335 /*
336 * Register, memory.
337 */
338 if (pVCpu->iem.s.uVexLength)
339 {
340 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
341 IEM_MC_LOCAL(RTUINT256U, uDst);
342 IEM_MC_LOCAL(RTUINT256U, uSrc1);
343 IEM_MC_LOCAL(RTUINT256U, uSrc2);
344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
345 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
346 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
347 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
348
349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
350 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
351 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
352 IEM_MC_PREPARE_AVX_USAGE();
353
354 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
355 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
356 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
357 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
358
359 IEM_MC_ADVANCE_RIP_AND_FINISH();
360 IEM_MC_END();
361 }
362 else
363 {
364 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
365 IEM_MC_LOCAL(RTUINT128U, uSrc2);
366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
367 IEM_MC_ARG(PRTUINT128U, puDst, 0);
368 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
369 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
370
371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
372 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
373 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
374 IEM_MC_PREPARE_AVX_USAGE();
375
376 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
377 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
378 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
379 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
380 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
381
382 IEM_MC_ADVANCE_RIP_AND_FINISH();
383 IEM_MC_END();
384 }
385 }
386}
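/*
 * Shape of the table consumed by the worker above (editorial sketch; the
 * real IEMOPMEDIAOPTF3 definition lives in the IEM internal headers, and the
 * function pointer typedef names below are assumptions):
 */
#if 0 /* illustrative sketch only */
typedef struct IEMOPMEDIAOPTF3
{
    PFNIEMAIMPLMEDIAOPTF3U128 pfnU128; /* 128-bit worker, no implicit state parameter */
    PFNIEMAIMPLMEDIAOPTF3U256 pfnU256; /* 256-bit worker, no implicit state parameter */
} IEMOPMEDIAOPTF3;
#endif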
387
388
389/**
390 * Common worker for AVX/AVX2 instructions on the forms:
391 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
392 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
393 *
394 * The 128-bit memory version of this instruction may elect to skip fetching the
395 * lower 64 bits of the operand. We, however, do not.
396 *
397 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
398 */
399FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
400{
401 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
402}
403
404
405/**
406 * Common worker for AVX/AVX2 instructions on the forms:
407 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
408 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
409 *
410 * The 128-bit memory version of this instruction may elect to skip fetching the
411 * higher 64 bits of the operand. We, however, do not.
412 *
413 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
414 */
415FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
416{
417 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
418}
419
420
421/**
422 * Common worker for AVX/AVX2 instructions on the forms:
423 * - vpxxx xmm0, xmm1/mem128
424 * - vpxxx ymm0, ymm1/mem256
425 *
426 * Takes a function table for functions w/o an implicit state parameter.
427 *
428 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
429 */
430FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
431{
432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
433 if (IEM_IS_MODRM_REG_MODE(bRm))
434 {
435 /*
436 * Register, register.
437 */
438 if (pVCpu->iem.s.uVexLength)
439 {
440 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
441 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
442 IEM_MC_LOCAL(RTUINT256U, uDst);
443 IEM_MC_LOCAL(RTUINT256U, uSrc);
444 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
445 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
446 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
447 IEM_MC_PREPARE_AVX_USAGE();
448 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
449 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
450 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
451 IEM_MC_ADVANCE_RIP_AND_FINISH();
452 IEM_MC_END();
453 }
454 else
455 {
456 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
457 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
458 IEM_MC_ARG(PRTUINT128U, puDst, 0);
459 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
460 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
461 IEM_MC_PREPARE_AVX_USAGE();
462 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
463 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
464 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
465 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
466 IEM_MC_ADVANCE_RIP_AND_FINISH();
467 IEM_MC_END();
468 }
469 }
470 else
471 {
472 /*
473 * Register, memory.
474 */
475 if (pVCpu->iem.s.uVexLength)
476 {
477 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
478 IEM_MC_LOCAL(RTUINT256U, uDst);
479 IEM_MC_LOCAL(RTUINT256U, uSrc);
480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
481 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
482 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
483
484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
485 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
486 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
487 IEM_MC_PREPARE_AVX_USAGE();
488
489 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
490 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
491 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
492
493 IEM_MC_ADVANCE_RIP_AND_FINISH();
494 IEM_MC_END();
495 }
496 else
497 {
498 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
499 IEM_MC_LOCAL(RTUINT128U, uSrc);
500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
501 IEM_MC_ARG(PRTUINT128U, puDst, 0);
502 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
503
504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
505 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
506 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
507 IEM_MC_PREPARE_AVX_USAGE();
508
509 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
510 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
511 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
512 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
513
514 IEM_MC_ADVANCE_RIP_AND_FINISH();
515 IEM_MC_END();
516 }
517 }
518}
519
520
521/**
522 * Common worker for AVX/AVX2 instructions on the forms:
523 * - vpxxx xmm0, xmm1/mem128
524 * - vpxxx ymm0, ymm1/mem256
525 *
526 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
527 */
528FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx, PCIEMOPMEDIAF2, pImpl)
529{
530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
531 if (IEM_IS_MODRM_REG_MODE(bRm))
532 {
533 /*
534 * Register, register.
535 */
536 if (pVCpu->iem.s.uVexLength)
537 {
538 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
539 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
540 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
541 IEM_MC_PREPARE_AVX_USAGE();
542
543 IEM_MC_LOCAL(X86YMMREG, uSrc);
544 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
545 IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
546 IEM_MC_LOCAL(X86YMMREG, uDst);
547 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
548 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
549 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
550 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
551 IEM_MC_ADVANCE_RIP_AND_FINISH();
552 IEM_MC_END();
553 }
554 else
555 {
556 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
557 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
558 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
559 IEM_MC_PREPARE_AVX_USAGE();
560
561 IEM_MC_LOCAL(X86XMMREG, uDst);
562 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
563 IEM_MC_ARG(PCX86XMMREG, puSrc, 1);
564 IEM_MC_REF_XREG_XMM_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
565 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
566 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
567 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
568 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
569 IEM_MC_ADVANCE_RIP_AND_FINISH();
570 IEM_MC_END();
571 }
572 }
573 else
574 {
575 /*
576 * Register, memory.
577 */
578 if (pVCpu->iem.s.uVexLength)
579 {
580 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
583 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
584 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
585 IEM_MC_PREPARE_AVX_USAGE();
586
587 IEM_MC_LOCAL(X86YMMREG, uSrc);
588 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
589 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
590 IEM_MC_LOCAL(X86YMMREG, uDst);
591 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
592 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
593 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
594 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
595 IEM_MC_ADVANCE_RIP_AND_FINISH();
596 IEM_MC_END();
597 }
598 else
599 {
600 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
603 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
604 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
605 IEM_MC_PREPARE_AVX_USAGE();
606
607 IEM_MC_LOCAL(X86XMMREG, uDst);
608 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
609 IEM_MC_LOCAL(X86XMMREG, uSrc);
610 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
611 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
612 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
613 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
614 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
615 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
616 IEM_MC_ADVANCE_RIP_AND_FINISH();
617 IEM_MC_END();
618 }
619 }
620}
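/*
 * Editorial note: the floating-point workers above invoke their helpers via
 * IEM_MC_CALL_AVX_AIMPL_* and follow up with
 * IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT so that MXCSR-reported
 * exceptions surface, whereas the integer-only *_Opt workers use
 * IEM_MC_CALL_VOID_AIMPL_* and need no post-call exception check.
 */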
621
622
623
624/* Opcode VEX.0F 0x00 - invalid */
625/* Opcode VEX.0F 0x01 - invalid */
626/* Opcode VEX.0F 0x02 - invalid */
627/* Opcode VEX.0F 0x03 - invalid */
628/* Opcode VEX.0F 0x04 - invalid */
629/* Opcode VEX.0F 0x05 - invalid */
630/* Opcode VEX.0F 0x06 - invalid */
631/* Opcode VEX.0F 0x07 - invalid */
632/* Opcode VEX.0F 0x08 - invalid */
633/* Opcode VEX.0F 0x09 - invalid */
634/* Opcode VEX.0F 0x0a - invalid */
635
636/** Opcode VEX.0F 0x0b. */
637FNIEMOP_DEF(iemOp_vud2)
638{
639 IEMOP_MNEMONIC(vud2, "vud2");
640 IEMOP_RAISE_INVALID_OPCODE_RET();
641}
642
643/* Opcode VEX.0F 0x0c - invalid */
644/* Opcode VEX.0F 0x0d - invalid */
645/* Opcode VEX.0F 0x0e - invalid */
646/* Opcode VEX.0F 0x0f - invalid */
647
648
649/**
650 * @opcode 0x10
651 * @oppfx none
652 * @opcpuid avx
653 * @opgroup og_avx_simdfp_datamove
654 * @opxcpttype 4UA
655 * @optest op1=1 op2=2 -> op1=2
656 * @optest op1=0 op2=-22 -> op1=-22
657 */
658FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
659{
660 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
661 Assert(pVCpu->iem.s.uVexLength <= 1);
662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
663 if (IEM_IS_MODRM_REG_MODE(bRm))
664 {
665 /*
666 * Register, register.
667 */
668 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
669 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
670 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
671 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
672 if (pVCpu->iem.s.uVexLength == 0)
673 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
674 IEM_GET_MODRM_RM(pVCpu, bRm));
675 else
676 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
677 IEM_GET_MODRM_RM(pVCpu, bRm));
678 IEM_MC_ADVANCE_RIP_AND_FINISH();
679 IEM_MC_END();
680 }
681 else if (pVCpu->iem.s.uVexLength == 0)
682 {
683 /*
684 * 128-bit: Register, Memory
685 */
686 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
687 IEM_MC_LOCAL(RTUINT128U, uSrc);
688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
689
690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
691 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
692 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
693 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
694
695 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
696 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
697
698 IEM_MC_ADVANCE_RIP_AND_FINISH();
699 IEM_MC_END();
700 }
701 else
702 {
703 /*
704 * 256-bit: Register, Memory
705 */
706 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
707 IEM_MC_LOCAL(RTUINT256U, uSrc);
708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
709
710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
711 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
712 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
713 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
714
715 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
716 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
717
718 IEM_MC_ADVANCE_RIP_AND_FINISH();
719 IEM_MC_END();
720 }
721}
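/*
 * Editorial reminder: every VEX-encoded form zeroes the destination register
 * above VLMAX, which is why both paths above end in *_ZX_VLMAX stores, e.g.:
 *     vmovups xmm1, [mem]   ; writes xmm1, zeroes ymm1[255:128]
 *     vmovups ymm1, [mem]   ; writes all 256 bits of ymm1
 */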
722
723
724/**
725 * @opcode 0x10
726 * @oppfx 0x66
727 * @opcpuid avx
728 * @opgroup og_avx_simdfp_datamove
729 * @opxcpttype 4UA
730 * @optest op1=1 op2=2 -> op1=2
731 * @optest op1=0 op2=-22 -> op1=-22
732 */
733FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
734{
735 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
736 Assert(pVCpu->iem.s.uVexLength <= 1);
737 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
738 if (IEM_IS_MODRM_REG_MODE(bRm))
739 {
740 /*
741 * Register, register.
742 */
743 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
744 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
745 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
746 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
747 if (pVCpu->iem.s.uVexLength == 0)
748 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
749 IEM_GET_MODRM_RM(pVCpu, bRm));
750 else
751 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
752 IEM_GET_MODRM_RM(pVCpu, bRm));
753 IEM_MC_ADVANCE_RIP_AND_FINISH();
754 IEM_MC_END();
755 }
756 else if (pVCpu->iem.s.uVexLength == 0)
757 {
758 /*
759 * 128-bit: Register, memory.
760 */
761 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
762 IEM_MC_LOCAL(RTUINT128U, uSrc);
763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
764
765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
766 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
767 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
768 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
769
770 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
771 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
772
773 IEM_MC_ADVANCE_RIP_AND_FINISH();
774 IEM_MC_END();
775 }
776 else
777 {
778 /*
779 * 256-bit: Register, memory.
780 */
781 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
782 IEM_MC_LOCAL(RTUINT256U, uSrc);
783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
784
785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
786 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
787 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
788 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
789
790 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
791 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
792
793 IEM_MC_ADVANCE_RIP_AND_FINISH();
794 IEM_MC_END();
795 }
796}
797
798
799FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
800{
801 Assert(pVCpu->iem.s.uVexLength <= 1);
802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
803 if (IEM_IS_MODRM_REG_MODE(bRm))
804 {
805 /**
806 * @opcode 0x10
807 * @oppfx 0xf3
808 * @opcodesub 11 mr/reg
809 * @opcpuid avx
810 * @opgroup og_avx_simdfp_datamerge
811 * @opxcpttype 5
812 * @optest op1=1 op2=0 op3=2 -> op1=2
813 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
814 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
815 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
816 * @note HssHi refers to bits 127:32.
817 */
818 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
819 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
820 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
821 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
822 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
823 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
824 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
825 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
826 IEM_MC_ADVANCE_RIP_AND_FINISH();
827 IEM_MC_END();
828 }
829 else
830 {
831 /**
832 * @opdone
833 * @opcode 0x10
834 * @oppfx 0xf3
835 * @opcodesub !11 mr/reg
836 * @opcpuid avx
837 * @opgroup og_avx_simdfp_datamove
838 * @opxcpttype 5
839 * @opfunction iemOp_vmovss_Vss_Hss_Wss
840 * @optest op1=1 op2=2 -> op1=2
841 * @optest op1=0 op2=-22 -> op1=-22
842 */
843 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
844 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
845 IEM_MC_LOCAL(uint32_t, uSrc);
846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
847
848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
849 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
850 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
851 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
852
853 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
854 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
855
856 IEM_MC_ADVANCE_RIP_AND_FINISH();
857 IEM_MC_END();
858 }
859}
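/*
 * Semantics sketch (editorial) for the two vmovss encodings handled above:
 *     register form:  dst[31:0]        = Uss[31:0]
 *                     dst[127:32]      = Hss(VVVV)[127:32]
 *                     dst[VLMAX-1:128] = 0
 *     memory form:    dst[31:0]        = [mem32]
 *                     dst[VLMAX-1:32]  = 0
 */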
860
861
862FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
863{
864 Assert(pVCpu->iem.s.uVexLength <= 1);
865 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
866 if (IEM_IS_MODRM_REG_MODE(bRm))
867 {
868 /**
869 * @opcode 0x10
870 * @oppfx 0xf2
871 * @opcodesub 11 mr/reg
872 * @opcpuid avx
873 * @opgroup og_avx_simdfp_datamerge
874 * @opxcpttype 5
875 * @optest op1=1 op2=0 op3=2 -> op1=2
876 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
877 * @optest op1=3 op2=-1 op3=0x77 ->
878 * op1=0xffffffffffffffff0000000000000077
879 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
880 */
881 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
882 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
883 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
884
885 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
886 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
887 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
888 IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
889 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
890 IEM_MC_ADVANCE_RIP_AND_FINISH();
891 IEM_MC_END();
892 }
893 else
894 {
895 /**
896 * @opdone
897 * @opcode 0x10
898 * @oppfx 0xf2
899 * @opcodesub !11 mr/reg
900 * @opcpuid avx
901 * @opgroup og_avx_simdfp_datamove
902 * @opxcpttype 5
903 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
904 * @optest op1=1 op2=2 -> op1=2
905 * @optest op1=0 op2=-22 -> op1=-22
906 */
907 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
908 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
909 IEM_MC_LOCAL(uint64_t, uSrc);
910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
911
912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
913 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
914 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
915 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
916
917 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
918 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
919
920 IEM_MC_ADVANCE_RIP_AND_FINISH();
921 IEM_MC_END();
922 }
923}
924
925
926/**
927 * @opcode 0x11
928 * @oppfx none
929 * @opcpuid avx
930 * @opgroup og_avx_simdfp_datamove
931 * @opxcpttype 4UA
932 * @optest op1=1 op2=2 -> op1=2
933 * @optest op1=0 op2=-22 -> op1=-22
934 */
935FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
936{
937 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
938 Assert(pVCpu->iem.s.uVexLength <= 1);
939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
940 if (IEM_IS_MODRM_REG_MODE(bRm))
941 {
942 /*
943 * Register, register.
944 */
945 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
946 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
947 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
948 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
949 if (pVCpu->iem.s.uVexLength == 0)
950 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
951 IEM_GET_MODRM_REG(pVCpu, bRm));
952 else
953 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
954 IEM_GET_MODRM_REG(pVCpu, bRm));
955 IEM_MC_ADVANCE_RIP_AND_FINISH();
956 IEM_MC_END();
957 }
958 else if (pVCpu->iem.s.uVexLength == 0)
959 {
960 /*
961 * 128-bit: Memory, register.
962 */
963 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
964 IEM_MC_LOCAL(RTUINT128U, uSrc);
965 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
966
967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
968 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
969 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
970 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
971
972 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
973 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
974
975 IEM_MC_ADVANCE_RIP_AND_FINISH();
976 IEM_MC_END();
977 }
978 else
979 {
980 /*
981 * 256-bit: Memory, register.
982 */
983 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
984 IEM_MC_LOCAL(RTUINT256U, uSrc);
985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
986
987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
988 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
989 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
990 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
991
992 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
993 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
994
995 IEM_MC_ADVANCE_RIP_AND_FINISH();
996 IEM_MC_END();
997 }
998}
999
1000
1001/**
1002 * @opcode 0x11
1003 * @oppfx 0x66
1004 * @opcpuid avx
1005 * @opgroup og_avx_simdfp_datamove
1006 * @opxcpttype 4UA
1007 * @optest op1=1 op2=2 -> op1=2
1008 * @optest op1=0 op2=-22 -> op1=-22
1009 */
1010FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
1011{
1012 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1013 Assert(pVCpu->iem.s.uVexLength <= 1);
1014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1015 if (IEM_IS_MODRM_REG_MODE(bRm))
1016 {
1017 /*
1018 * Register, register.
1019 */
1020 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1021 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1022 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1023 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1024 if (pVCpu->iem.s.uVexLength == 0)
1025 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1026 IEM_GET_MODRM_REG(pVCpu, bRm));
1027 else
1028 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1029 IEM_GET_MODRM_REG(pVCpu, bRm));
1030 IEM_MC_ADVANCE_RIP_AND_FINISH();
1031 IEM_MC_END();
1032 }
1033 else if (pVCpu->iem.s.uVexLength == 0)
1034 {
1035 /*
1036 * 128-bit: Memory, register.
1037 */
1038 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1039 IEM_MC_LOCAL(RTUINT128U, uSrc);
1040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1041
1042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1043 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1044 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1045 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1046
1047 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
1048 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1049
1050 IEM_MC_ADVANCE_RIP_AND_FINISH();
1051 IEM_MC_END();
1052 }
1053 else
1054 {
1055 /*
1056 * 256-bit: Memory, register.
1057 */
1058 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1059 IEM_MC_LOCAL(RTUINT256U, uSrc);
1060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1061
1062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1063 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1064 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1065 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1066
1067 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1068 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1069
1070 IEM_MC_ADVANCE_RIP_AND_FINISH();
1071 IEM_MC_END();
1072 }
1073}
1074
1075
1076FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
1077{
1078 Assert(pVCpu->iem.s.uVexLength <= 1);
1079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1080 if (IEM_IS_MODRM_REG_MODE(bRm))
1081 {
1082 /**
1083 * @opcode 0x11
1084 * @oppfx 0xf3
1085 * @opcodesub 11 mr/reg
1086 * @opcpuid avx
1087 * @opgroup og_avx_simdfp_datamerge
1088 * @opxcpttype 5
1089 * @optest op1=1 op2=0 op3=2 -> op1=2
1090 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
1091 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
1092 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
1093 */
1094 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1095 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1096 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1097
1098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1099 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1100 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
1101 IEM_GET_MODRM_REG(pVCpu, bRm),
1102 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
1103 IEM_MC_ADVANCE_RIP_AND_FINISH();
1104 IEM_MC_END();
1105 }
1106 else
1107 {
1108 /**
1109 * @opdone
1110 * @opcode 0x11
1111 * @oppfx 0xf3
1112 * @opcodesub !11 mr/reg
1113 * @opcpuid avx
1114 * @opgroup og_avx_simdfp_datamove
1115 * @opxcpttype 5
1116 * @opfunction iemOp_vmovss_Wss_Hss_Vss
1117 * @optest op1=1 op2=2 -> op1=2
1118 * @optest op1=0 op2=-22 -> op1=-22
1119 */
1120 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1121 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1122 IEM_MC_LOCAL(uint32_t, uSrc);
1123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1124
1125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1126 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1127 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1128 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1129
1130 IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1131 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1132
1133 IEM_MC_ADVANCE_RIP_AND_FINISH();
1134 IEM_MC_END();
1135 }
1136}
1137
1138
1139FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
1140{
1141 Assert(pVCpu->iem.s.uVexLength <= 1);
1142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1143 if (IEM_IS_MODRM_REG_MODE(bRm))
1144 {
1145 /**
1146 * @opcode 0x11
1147 * @oppfx 0xf2
1148 * @opcodesub 11 mr/reg
1149 * @opcpuid avx
1150 * @opgroup og_avx_simdfp_datamerge
1151 * @opxcpttype 5
1152 * @optest op1=1 op2=0 op3=2 -> op1=2
1153 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
1154 * @optest op1=3 op2=-1 op3=0x77 ->
1155 * op1=0xffffffffffffffff0000000000000077
1156 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
1157 */
1158 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1159 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1160 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1161
1162 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1163 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1164 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1165 IEM_GET_MODRM_REG(pVCpu, bRm),
1166 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
1167 IEM_MC_ADVANCE_RIP_AND_FINISH();
1168 IEM_MC_END();
1169 }
1170 else
1171 {
1172 /**
1173 * @opdone
1174 * @opcode 0x11
1175 * @oppfx 0xf2
1176 * @opcodesub !11 mr/reg
1177 * @opcpuid avx
1178 * @opgroup og_avx_simdfp_datamove
1179 * @opxcpttype 5
1180 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
1181 * @optest op1=1 op2=2 -> op1=2
1182 * @optest op1=0 op2=-22 -> op1=-22
1183 */
1184 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1185 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1186 IEM_MC_LOCAL(uint64_t, uSrc);
1187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1188
1189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1190 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1191 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1192 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1193
1194 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1195 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1196
1197 IEM_MC_ADVANCE_RIP_AND_FINISH();
1198 IEM_MC_END();
1199 }
1200}
1201
1202
1203FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
1204{
1205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1206 if (IEM_IS_MODRM_REG_MODE(bRm))
1207 {
1208 /**
1209 * @opcode 0x12
1210 * @opcodesub 11 mr/reg
1211 * @oppfx none
1212 * @opcpuid avx
1213 * @opgroup og_avx_simdfp_datamerge
1214 * @opxcpttype 7LZ
1215 * @optest op2=0x2200220122022203
1216 * op3=0x3304330533063307
1217 * -> op1=0x22002201220222033304330533063307
1218 * @optest op2=-1 op3=-42 -> op1=-42
1219 * @note op3 and op2 are only the 8-byte high XMM register halves.
1220 */
1221 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1222 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1223 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1224
1225 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1226 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1227 IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1228 IEM_GET_MODRM_RM(pVCpu, bRm),
1229 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1230
1231 IEM_MC_ADVANCE_RIP_AND_FINISH();
1232 IEM_MC_END();
1233 }
1234 else
1235 {
1236 /**
1237 * @opdone
1238 * @opcode 0x12
1239 * @opcodesub !11 mr/reg
1240 * @oppfx none
1241 * @opcpuid avx
1242 * @opgroup og_avx_simdfp_datamove
1243 * @opxcpttype 5LZ
1244 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1245 * @optest op1=1 op2=0 op3=0 -> op1=0
1246 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
1247 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
1248 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
1249 */
1250 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1251
1252 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1253 IEM_MC_LOCAL(uint64_t, uSrc);
1254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1255
1256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1257 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1258 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1259 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1260
1261 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1262 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1263 uSrc,
1264 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1265
1266 IEM_MC_ADVANCE_RIP_AND_FINISH();
1267 IEM_MC_END();
1268 }
1269}
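/*
 * Semantics sketch (editorial) for the two encodings handled above:
 *     vmovhlps (reg):  dst[63:0]   = Uq[127:64]
 *                      dst[127:64] = Hq(VVVV)[127:64]
 *     vmovlps (mem):   dst[63:0]   = [mem64]
 *                      dst[127:64] = Hq(VVVV)[127:64]
 * Both zero the destination above bit 127 (VEX.L must be 0 here).
 */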
1270
1271
1272/**
1273 * @opcode 0x12
1274 * @opcodesub !11 mr/reg
1275 * @oppfx 0x66
1276 * @opcpuid avx
1277 * @opgroup og_avx_pcksclr_datamerge
1278 * @opxcpttype 5LZ
1279 * @optest op2=0 op3=2 -> op1=2
1280 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
1281 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
1282 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
1283 */
1284FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1285{
1286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1287 if (IEM_IS_MODRM_MEM_MODE(bRm))
1288 {
1289 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1290
1291 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1292 IEM_MC_LOCAL(uint64_t, uSrc);
1293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1294
1295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1296 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1297 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1298 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1299
1300 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1301 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1302 uSrc,
1303 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1304
1305 IEM_MC_ADVANCE_RIP_AND_FINISH();
1306 IEM_MC_END();
1307 }
1308
1309 /**
1310 * @opdone
1311 * @opmnemonic udvex660f12m3
1312 * @opcode 0x12
1313 * @opcodesub 11 mr/reg
1314 * @oppfx 0x66
1315 * @opunused immediate
1316 * @opcpuid avx
1317 * @optest ->
1318 */
1319 else
1320 IEMOP_RAISE_INVALID_OPCODE_RET();
1321}
1322
1323
1324/**
1325 * @opcode 0x12
1326 * @oppfx 0xf3
1327 * @opcpuid avx
1328 * @opgroup og_avx_pcksclr_datamove
1329 * @opxcpttype 4
1330 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1331 * -> op1=0x00000002000000020000000100000001
1332 * @optest vex.l==1 /
1333 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1334 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1335 */
1336FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1337{
1338 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1339 Assert(pVCpu->iem.s.uVexLength <= 1);
1340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1341 if (IEM_IS_MODRM_REG_MODE(bRm))
1342 {
1343 /*
1344 * Register, register.
1345 */
1346 if (pVCpu->iem.s.uVexLength == 0)
1347 {
1348 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1349 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1350 IEM_MC_LOCAL(RTUINT128U, uSrc);
1351
1352 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1353 IEM_MC_PREPARE_AVX_USAGE();
1354
1355 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1356 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1357 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1358 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1359 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1360 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1361
1362 IEM_MC_ADVANCE_RIP_AND_FINISH();
1363 IEM_MC_END();
1364 }
1365 else
1366 {
1367 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1368 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1369 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1370 IEM_MC_PREPARE_AVX_USAGE();
1371
1372 IEM_MC_LOCAL(RTUINT256U, uSrc);
1373 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1374 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1375 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1376 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1377 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1378 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1379 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1380 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1381 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1382 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1383
1384 IEM_MC_ADVANCE_RIP_AND_FINISH();
1385 IEM_MC_END();
1386 }
1387 }
1388 else
1389 {
1390 /*
1391 * Register, memory.
1392 */
1393 if (pVCpu->iem.s.uVexLength == 0)
1394 {
1395 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1396 IEM_MC_LOCAL(RTUINT128U, uSrc);
1397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1398
1399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1400 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1401 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1402 IEM_MC_PREPARE_AVX_USAGE();
1403
1404 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1405 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1406 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1407 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1408 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1409 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1410
1411 IEM_MC_ADVANCE_RIP_AND_FINISH();
1412 IEM_MC_END();
1413 }
1414 else
1415 {
1416 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1419 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1420 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1421 IEM_MC_PREPARE_AVX_USAGE();
1422
1423 IEM_MC_LOCAL(RTUINT256U, uSrc);
1424 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1425
1426 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1427 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1428 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1429 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1430 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1431 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1432 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1433 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1434 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1435
1436 IEM_MC_ADVANCE_RIP_AND_FINISH();
1437 IEM_MC_END();
1438 }
1439 }
1440}
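/*
 * Lane mapping implemented above (editorial): vmovsldup duplicates the
 * even-numbered dwords of the source:
 *     dst dword:  0 1 2 3 [4 5 6 7]
 *     src dword:  0 0 2 2 [4 4 6 6]
 */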
1441
1442
1443/**
1444 * @opcode 0x12
1445 * @oppfx 0xf2
1446 * @opcpuid avx
1447 * @opgroup og_avx_pcksclr_datamove
1448 * @opxcpttype 5
1449 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1450 * -> op1=0x22222222111111112222222211111111
1451 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1452 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1453 */
1454FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1455{
1456 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1458 if (IEM_IS_MODRM_REG_MODE(bRm))
1459 {
1460 /*
1461 * Register, register.
1462 */
1463 if (pVCpu->iem.s.uVexLength == 0)
1464 {
1465 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1466 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1467 IEM_MC_LOCAL(uint64_t, uSrc);
1468
1469 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1470 IEM_MC_PREPARE_AVX_USAGE();
1471
1472 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1473 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1474 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1475 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1476
1477 IEM_MC_ADVANCE_RIP_AND_FINISH();
1478 IEM_MC_END();
1479 }
1480 else
1481 {
1482 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1483 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1484 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1485 IEM_MC_PREPARE_AVX_USAGE();
1486
1487 IEM_MC_LOCAL(uint64_t, uSrc1);
1488 IEM_MC_LOCAL(uint64_t, uSrc2);
1489 IEM_MC_FETCH_YREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1490 IEM_MC_FETCH_YREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 2 /* a_iQword*/);
1491
1492 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc1);
1493 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc1);
1494 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /* a_iQword*/, uSrc2);
1495 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /* a_iQword*/, uSrc2);
1496 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1497
1498 IEM_MC_ADVANCE_RIP_AND_FINISH();
1499 IEM_MC_END();
1500 }
1501 }
1502 else
1503 {
1504 /*
1505 * Register, memory.
1506 */
1507 if (pVCpu->iem.s.uVexLength == 0)
1508 {
1509 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1511 IEM_MC_LOCAL(uint64_t, uSrc);
1512
1513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1514 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1515 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1516 IEM_MC_PREPARE_AVX_USAGE();
1517
1518 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1519 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1520 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1521 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1522
1523 IEM_MC_ADVANCE_RIP_AND_FINISH();
1524 IEM_MC_END();
1525 }
1526 else
1527 {
1528 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1530
1531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1532 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1533 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1534 IEM_MC_PREPARE_AVX_USAGE();
1535
1536 IEM_MC_LOCAL(RTUINT256U, uSrc);
1537 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1538
1539 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1540 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1541 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1542 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1543 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1544
1545 IEM_MC_ADVANCE_RIP_AND_FINISH();
1546 IEM_MC_END();
1547 }
1548 }
1549}
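/*
 * Lane mapping implemented above (editorial): vmovddup duplicates the
 * even-numbered qwords of the source:
 *     dst qword:  0 1 [2 3]
 *     src qword:  0 0 [2 2]
 */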
1550
1551
1552/**
1553 * @opcode 0x13
1554 * @opcodesub !11 mr/reg
1555 * @oppfx none
1556 * @opcpuid avx
1557 * @opgroup og_avx_simdfp_datamove
1558 * @opxcpttype 5
1559 * @optest op1=1 op2=2 -> op1=2
1560 * @optest op1=0 op2=-42 -> op1=-42
1561 */
1562FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1563{
1564 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1565 if (IEM_IS_MODRM_MEM_MODE(bRm))
1566 {
1567 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1568
1569 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1570 IEM_MC_LOCAL(uint64_t, uSrc);
1571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1572
1573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1574 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1575 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1576 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1577
1578 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1579 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1580
1581 IEM_MC_ADVANCE_RIP_AND_FINISH();
1582 IEM_MC_END();
1583 }
1584
1585 /**
1586 * @opdone
1587 * @opmnemonic udvex0f13m3
1588 * @opcode 0x13
1589 * @opcodesub 11 mr/reg
1590 * @oppfx none
1591 * @opunused immediate
1592 * @opcpuid avx
1593 * @optest ->
1594 */
1595 else
1596 IEMOP_RAISE_INVALID_OPCODE_RET();
1597}
1598
1599
1600/**
1601 * @opcode 0x13
1602 * @opcodesub !11 mr/reg
1603 * @oppfx 0x66
1604 * @opcpuid avx
1605 * @opgroup og_avx_pcksclr_datamove
1606 * @opxcpttype 5
1607 * @optest op1=1 op2=2 -> op1=2
1608 * @optest op1=0 op2=-42 -> op1=-42
1609 */
1610FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1611{
1612 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1613 if (IEM_IS_MODRM_MEM_MODE(bRm))
1614 {
1615 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1616 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1617 IEM_MC_LOCAL(uint64_t, uSrc);
1618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1619
1620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1621 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1623 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1624
1625 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1626 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1627
1628 IEM_MC_ADVANCE_RIP_AND_FINISH();
1629 IEM_MC_END();
1630 }
1631
1632 /**
1633 * @opdone
1634 * @opmnemonic udvex660f13m3
1635 * @opcode 0x13
1636 * @opcodesub 11 mr/reg
1637 * @oppfx 0x66
1638 * @opunused immediate
1639 * @opcpuid avx
1640 * @optest ->
1641 */
1642 else
1643 IEMOP_RAISE_INVALID_OPCODE_RET();
1644}
1645
1646/* Opcode VEX.F3.0F 0x13 - invalid */
1647/* Opcode VEX.F2.0F 0x13 - invalid */
1648
1649/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
1650FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
1651{
1652 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1653 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
1654 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1655}
1656
1657
1658/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1659FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
1660{
1661 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1662 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
1663 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1664}
1665
1666
1667/* Opcode VEX.F3.0F 0x14 - invalid */
1668/* Opcode VEX.F2.0F 0x14 - invalid */
1669
1670
1671/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1672FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
1673{
1674 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1675 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
1676 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1677}
1678
1679
1680/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1681FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
1682{
1683 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1684 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
1685 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1686}
1687
1688
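/* Editor's sketch (illustrative only): dword-lane semantics of the four
 * unpack forms above; src1 is the VEX.vvvv operand (Hx), src2 is Wx, and the
 * buffers must not alias.  The pd variants use quadword lanes instead, and on
 * YMM the pattern repeats independently in each 128-bit lane. */
#if 0
static void sketchUnpckLoPs(uint32_t aDst[4], uint32_t const aSrc1[4], uint32_t const aSrc2[4])
{
    aDst[0] = aSrc1[0]; aDst[1] = aSrc2[0];    /* vunpcklps: interleave lanes 0..1 */
    aDst[2] = aSrc1[1]; aDst[3] = aSrc2[1];
}

static void sketchUnpckHiPs(uint32_t aDst[4], uint32_t const aSrc1[4], uint32_t const aSrc2[4])
{
    aDst[0] = aSrc1[2]; aDst[1] = aSrc2[2];    /* vunpckhps: interleave lanes 2..3 */
    aDst[2] = aSrc1[3]; aDst[3] = aSrc2[3];
}
#endif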
1689/* Opcode VEX.F3.0F 0x15 - invalid */
1690/* Opcode VEX.F2.0F 0x15 - invalid */
1691
1692
1693FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1694{
1695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1696 if (IEM_IS_MODRM_REG_MODE(bRm))
1697 {
1698 /**
1699 * @opcode 0x16
1700 * @opcodesub 11 mr/reg
1701 * @oppfx none
1702 * @opcpuid avx
1703 * @opgroup og_avx_simdfp_datamerge
1704 * @opxcpttype 7LZ
1705 */
1706 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1707
1708 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1709 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1710
1711 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1712 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1713 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1714 IEM_GET_MODRM_RM(pVCpu, bRm),
1715 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1716
1717 IEM_MC_ADVANCE_RIP_AND_FINISH();
1718 IEM_MC_END();
1719 }
1720 else
1721 {
1722 /**
1723 * @opdone
1724 * @opcode 0x16
1725 * @opcodesub !11 mr/reg
1726 * @oppfx none
1727 * @opcpuid avx
1728 * @opgroup og_avx_simdfp_datamove
1729 * @opxcpttype 5LZ
1730 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1731 */
1732 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1733
1734 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1735 IEM_MC_LOCAL(uint64_t, uSrc);
1736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1737
1738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1739 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1740 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1741 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1742
1743 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1744 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1745 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1746 uSrc);
1747
1748 IEM_MC_ADVANCE_RIP_AND_FINISH();
1749 IEM_MC_END();
1750 }
1751}
1752
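/* Editor's sketch (illustrative only): both encodings handled above produce
 * the same destination pattern - quadword 0 from the VEX.vvvv operand,
 * quadword 1 from either the low quadword of Uq (register form, vmovlhps) or
 * the m64 operand (memory form, vmovhps) - and the VLMAX zeroing clears bits
 * 255:128 of the destination. */
#if 0
static void sketchMovLhPs(uint64_t aDst[2], uint64_t const aHsrc[2], uint64_t uQwordSrc2)
{
    aDst[0] = aHsrc[0];     /* low quadword passes through from Hq */
    aDst[1] = uQwordSrc2;   /* reg form: Uq[63:0]; mem form: the m64 value */
}
#endif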
1753
1754/**
1755 * @opcode 0x16
1756 * @opcodesub !11 mr/reg
1757 * @oppfx 0x66
1758 * @opcpuid avx
1759 * @opgroup og_avx_pcksclr_datamerge
1760 * @opxcpttype 5LZ
1761 */
1762FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1763{
1764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1765 if (IEM_IS_MODRM_MEM_MODE(bRm))
1766 {
1767 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1768
1769 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1770 IEM_MC_LOCAL(uint64_t, uSrc);
1771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1772
1773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1774 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1775 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1776 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1777
1778 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1779 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1780 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1781 uSrc);
1782
1783 IEM_MC_ADVANCE_RIP_AND_FINISH();
1784 IEM_MC_END();
1785 }
1786
1787 /**
1788 * @opdone
1789 * @opmnemonic udvex660f16m3
1790 * @opcode 0x16
1791 * @opcodesub 11 mr/reg
1792 * @oppfx 0x66
1793 * @opunused immediate
1794 * @opcpuid avx
1795 * @optest ->
1796 */
1797 else
1798 IEMOP_RAISE_INVALID_OPCODE_RET();
1799}
1800
1801
1802/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1803/**
1804 * @opcode 0x16
1805 * @oppfx 0xf3
1806 * @opcpuid avx
1807 * @opgroup og_avx_pcksclr_datamove
1808 * @opxcpttype 4
1809 */
1810FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1811{
1812 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1813 Assert(pVCpu->iem.s.uVexLength <= 1);
1814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1815 if (IEM_IS_MODRM_REG_MODE(bRm))
1816 {
1817 /*
1818 * Register, register.
1819 */
1820 if (pVCpu->iem.s.uVexLength == 0)
1821 {
1822 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1823 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1824 IEM_MC_LOCAL(RTUINT128U, uSrc);
1825
1826 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1827 IEM_MC_PREPARE_AVX_USAGE();
1828
1829 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1830 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1831 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1832 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1833 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1834 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1835
1836 IEM_MC_ADVANCE_RIP_AND_FINISH();
1837 IEM_MC_END();
1838 }
1839 else
1840 {
1841 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1842 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1843 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1844 IEM_MC_PREPARE_AVX_USAGE();
1845
1846 IEM_MC_LOCAL(RTUINT256U, uSrc);
1847 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1848 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1849 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1850 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1851 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1852 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1853 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1854 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1855 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1856 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1857
1858 IEM_MC_ADVANCE_RIP_AND_FINISH();
1859 IEM_MC_END();
1860 }
1861 }
1862 else
1863 {
1864 /*
1865 * Register, memory.
1866 */
1867 if (pVCpu->iem.s.uVexLength == 0)
1868 {
1869 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1870 IEM_MC_LOCAL(RTUINT128U, uSrc);
1871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1872
1873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1874 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1875 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1876 IEM_MC_PREPARE_AVX_USAGE();
1877
1878 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1879 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1880 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1881 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1882 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1883 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1884
1885 IEM_MC_ADVANCE_RIP_AND_FINISH();
1886 IEM_MC_END();
1887 }
1888 else
1889 {
1890 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1893 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1894 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1895 IEM_MC_PREPARE_AVX_USAGE();
1896
1897 IEM_MC_LOCAL(RTUINT256U, uSrc);
1898 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1899
1900 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1901 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1902 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1903 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1904 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1905 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1906 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1907 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1908 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1909
1910 IEM_MC_ADVANCE_RIP_AND_FINISH();
1911 IEM_MC_END();
1912 }
1913 }
1914}
1915
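/* Editor's sketch (illustrative only): the per-lane store sequence above is
 * just vmovshdup's "duplicate the odd dword of each pair" pattern, repeated
 * for the second 128-bit lane in the VEX.256 paths. */
#if 0
static void sketchMovShDup(uint32_t aDst[4], uint32_t const aSrc[4])
{
    aDst[0] = aSrc[1];
    aDst[1] = aSrc[1];
    aDst[2] = aSrc[3];
    aDst[3] = aSrc[3];
}
#endif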
1916
1917/* Opcode VEX.F2.0F 0x16 - invalid */
1918
1919
1920/**
1921 * @opcode 0x17
1922 * @opcodesub !11 mr/reg
1923 * @oppfx none
1924 * @opcpuid avx
1925 * @opgroup og_avx_simdfp_datamove
1926 * @opxcpttype 5
1927 */
1928FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1929{
1930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1931 if (IEM_IS_MODRM_MEM_MODE(bRm))
1932 {
1933 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1934
1935 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1936 IEM_MC_LOCAL(uint64_t, uSrc);
1937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1938
1939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1940 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1941 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1942 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1943
1944 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1945 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1946
1947 IEM_MC_ADVANCE_RIP_AND_FINISH();
1948 IEM_MC_END();
1949 }
1950
1951 /**
1952 * @opdone
1953 * @opmnemonic udvex0f17m3
1954 * @opcode 0x17
1955 * @opcodesub 11 mr/reg
1956 * @oppfx none
1957 * @opunused immediate
1958 * @opcpuid avx
1959 * @optest ->
1960 */
1961 else
1962 IEMOP_RAISE_INVALID_OPCODE_RET();
1963}
1964
1965
1966/**
1967 * @opcode 0x17
1968 * @opcodesub !11 mr/reg
1969 * @oppfx 0x66
1970 * @opcpuid avx
1971 * @opgroup og_avx_pcksclr_datamove
1972 * @opxcpttype 5
1973 */
1974FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1975{
1976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1977 if (IEM_IS_MODRM_MEM_MODE(bRm))
1978 {
1979 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1980
1981 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1982 IEM_MC_LOCAL(uint64_t, uSrc);
1983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1984
1985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1986 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1987 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1988 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1989
1990 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1991 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1992
1993 IEM_MC_ADVANCE_RIP_AND_FINISH();
1994 IEM_MC_END();
1995 }
1996
1997 /**
1998 * @opdone
1999 * @opmnemonic udvex660f17m3
2000 * @opcode 0x17
2001 * @opcodesub 11 mr/reg
2002 * @oppfx 0x66
2003 * @opunused immediate
2004 * @opcpuid avx
2005 * @optest ->
2006 */
2007 else
2008 IEMOP_RAISE_INVALID_OPCODE_RET();
2009}
2010
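/* Editor's sketch (illustrative only): both 0x17 store forms above are the
 * high-quadword counterpart of the 0x13 encodings - quadword 1 of the XMM
 * register goes to memory, the register is not modified. */
#if 0
static void sketchMovHpStore(uint64_t *puDst /* m64 */, uint64_t const auSrc[2] /* xmm */)
{
    *puDst = auSrc[1];
}
#endif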
2011
2012/* Opcode VEX.F3.0F 0x17 - invalid */
2013/* Opcode VEX.F2.0F 0x17 - invalid */
2014
2015
2016/* Opcode VEX.0F 0x18 - invalid */
2017/* Opcode VEX.0F 0x19 - invalid */
2018/* Opcode VEX.0F 0x1a - invalid */
2019/* Opcode VEX.0F 0x1b - invalid */
2020/* Opcode VEX.0F 0x1c - invalid */
2021/* Opcode VEX.0F 0x1d - invalid */
2022/* Opcode VEX.0F 0x1e - invalid */
2023/* Opcode VEX.0F 0x1f - invalid */
2024
2025/* Opcode VEX.0F 0x20 - invalid */
2026/* Opcode VEX.0F 0x21 - invalid */
2027/* Opcode VEX.0F 0x22 - invalid */
2028/* Opcode VEX.0F 0x23 - invalid */
2029/* Opcode VEX.0F 0x24 - invalid */
2030/* Opcode VEX.0F 0x25 - invalid */
2031/* Opcode VEX.0F 0x26 - invalid */
2032/* Opcode VEX.0F 0x27 - invalid */
2033
2034/**
2035 * @opcode 0x28
2036 * @oppfx none
2037 * @opcpuid avx
2038 * @opgroup og_avx_pcksclr_datamove
2039 * @opxcpttype 1
2040 * @optest op1=1 op2=2 -> op1=2
2041 * @optest op1=0 op2=-42 -> op1=-42
2042 * @note Almost identical to vmovapd.
2043 */
2044FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
2045{
2046 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2048 Assert(pVCpu->iem.s.uVexLength <= 1);
2049 if (IEM_IS_MODRM_REG_MODE(bRm))
2050 {
2051 /*
2052 * Register, register.
2053 */
2054 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2055 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2056
2057 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2058 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2059 if (pVCpu->iem.s.uVexLength == 0)
2060 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2061 IEM_GET_MODRM_RM(pVCpu, bRm));
2062 else
2063 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2064 IEM_GET_MODRM_RM(pVCpu, bRm));
2065 IEM_MC_ADVANCE_RIP_AND_FINISH();
2066 IEM_MC_END();
2067 }
2068 else
2069 {
2070 /*
2071 * Register, memory.
2072 */
2073 if (pVCpu->iem.s.uVexLength == 0)
2074 {
2075 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2077 IEM_MC_LOCAL(RTUINT128U, uSrc);
2078
2079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2080 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2081 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2082 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2083
2084 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2085 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2086
2087 IEM_MC_ADVANCE_RIP_AND_FINISH();
2088 IEM_MC_END();
2089 }
2090 else
2091 {
2092 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2094 IEM_MC_LOCAL(RTUINT256U, uSrc);
2095
2096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2097 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2099 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2100
2101 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2102 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2103
2104 IEM_MC_ADVANCE_RIP_AND_FINISH();
2105 IEM_MC_END();
2106 }
2107 }
2108}
2109
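/* Editor's note: the memory paths of the vmovaps/vmovapd family (0x28/0x29)
 * use the _ALIGN_ fetch/store operands, so a misaligned operand faults
 * instead of being accessed.  Rough shape of that check (illustrative only;
 * the real logic lives behind IEM_MC_FETCH_MEM_U128_ALIGN_SSE and friends): */
#if 0
static bool sketchIsVecOperandAligned(uintptr_t uPtr, unsigned cbVec /* 16 or 32 */)
{
    return (uPtr & (cbVec - 1)) == 0; /* otherwise #GP(0) */
}
#endif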
2110
2111/**
2112 * @opcode 0x28
2113 * @oppfx 66
2114 * @opcpuid avx
2115 * @opgroup og_avx_pcksclr_datamove
2116 * @opxcpttype 1
2117 * @optest op1=1 op2=2 -> op1=2
2118 * @optest op1=0 op2=-42 -> op1=-42
2119 * @note Almost identical to vmovaps
2120 */
2121FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2122{
2123 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2125 Assert(pVCpu->iem.s.uVexLength <= 1);
2126 if (IEM_IS_MODRM_REG_MODE(bRm))
2127 {
2128 /*
2129 * Register, register.
2130 */
2131 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2132 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2133
2134 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2135 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2136 if (pVCpu->iem.s.uVexLength == 0)
2137 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2138 IEM_GET_MODRM_RM(pVCpu, bRm));
2139 else
2140 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2141 IEM_GET_MODRM_RM(pVCpu, bRm));
2142 IEM_MC_ADVANCE_RIP_AND_FINISH();
2143 IEM_MC_END();
2144 }
2145 else
2146 {
2147 /*
2148 * Register, memory.
2149 */
2150 if (pVCpu->iem.s.uVexLength == 0)
2151 {
2152 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2154 IEM_MC_LOCAL(RTUINT128U, uSrc);
2155
2156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2157 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2158 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2159 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2160
2161 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2162 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2163
2164 IEM_MC_ADVANCE_RIP_AND_FINISH();
2165 IEM_MC_END();
2166 }
2167 else
2168 {
2169 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2171 IEM_MC_LOCAL(RTUINT256U, uSrc);
2172
2173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2174 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2175 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2176 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2177
2178 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2179 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2180
2181 IEM_MC_ADVANCE_RIP_AND_FINISH();
2182 IEM_MC_END();
2183 }
2184 }
2185}
2186
2187/**
2188 * @opmnemonic udvexf30f28
2189 * @opcode 0x28
2190 * @oppfx 0xf3
2191 * @opunused vex.modrm
2192 * @opcpuid avx
2193 * @optest ->
2194 * @opdone
2195 */
2196
2197/**
2198 * @opmnemonic udvexf20f28
2199 * @opcode 0x28
2200 * @oppfx 0xf2
2201 * @opunused vex.modrm
2202 * @opcpuid avx
2203 * @optest ->
2204 * @opdone
2205 */
2206
2207/**
2208 * @opcode 0x29
2209 * @oppfx none
2210 * @opcpuid avx
2211 * @opgroup og_avx_pcksclr_datamove
2212 * @opxcpttype 1
2213 * @optest op1=1 op2=2 -> op1=2
2214 * @optest op1=0 op2=-42 -> op1=-42
2215 * @note Almost identical to vmovapd.
2216 */
2217FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
2218{
2219 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2221 Assert(pVCpu->iem.s.uVexLength <= 1);
2222 if (IEM_IS_MODRM_REG_MODE(bRm))
2223 {
2224 /*
2225 * Register, register.
2226 */
2227 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2228 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2229
2230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2232 if (pVCpu->iem.s.uVexLength == 0)
2233 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2234 IEM_GET_MODRM_REG(pVCpu, bRm));
2235 else
2236 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2237 IEM_GET_MODRM_REG(pVCpu, bRm));
2238 IEM_MC_ADVANCE_RIP_AND_FINISH();
2239 IEM_MC_END();
2240 }
2241 else
2242 {
2243 /*
2244 * Register, memory.
2245 */
2246 if (pVCpu->iem.s.uVexLength == 0)
2247 {
2248 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2250 IEM_MC_LOCAL(RTUINT128U, uSrc);
2251
2252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2253 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2254 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2255 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2256
2257 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2258 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2259
2260 IEM_MC_ADVANCE_RIP_AND_FINISH();
2261 IEM_MC_END();
2262 }
2263 else
2264 {
2265 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2267 IEM_MC_LOCAL(RTUINT256U, uSrc);
2268
2269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2270 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2271 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2272 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2273
2274 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2275 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2276
2277 IEM_MC_ADVANCE_RIP_AND_FINISH();
2278 IEM_MC_END();
2279 }
2280 }
2281}
2282
2283/**
2284 * @opcode 0x29
2285 * @oppfx 66
2286 * @opcpuid avx
2287 * @opgroup og_avx_pcksclr_datamove
2288 * @opxcpttype 1
2289 * @optest op1=1 op2=2 -> op1=2
2290 * @optest op1=0 op2=-42 -> op1=-42
2291 * @note Almost identical to vmovaps
2292 */
2293FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2294{
2295 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2296 Assert(pVCpu->iem.s.uVexLength <= 1);
2297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2298 if (IEM_IS_MODRM_REG_MODE(bRm))
2299 {
2300 /*
2301 * Register, register.
2302 */
2303 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2304 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2305
2306 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2307 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2308 if (pVCpu->iem.s.uVexLength == 0)
2309 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2310 IEM_GET_MODRM_REG(pVCpu, bRm));
2311 else
2312 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2313 IEM_GET_MODRM_REG(pVCpu, bRm));
2314 IEM_MC_ADVANCE_RIP_AND_FINISH();
2315 IEM_MC_END();
2316 }
2317 else
2318 {
2319 /*
2320 * Register, memory.
2321 */
2322 if (pVCpu->iem.s.uVexLength == 0)
2323 {
2324 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2326 IEM_MC_LOCAL(RTUINT128U, uSrc);
2327
2328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2329 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2331 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2332
2333 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2334 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2335
2336 IEM_MC_ADVANCE_RIP_AND_FINISH();
2337 IEM_MC_END();
2338 }
2339 else
2340 {
2341 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2343 IEM_MC_LOCAL(RTUINT256U, uSrc);
2344
2345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2346 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2347 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2348 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2349
2350 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2351 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2352
2353 IEM_MC_ADVANCE_RIP_AND_FINISH();
2354 IEM_MC_END();
2355 }
2356 }
2357}
2358
2359
2360/**
2361 * @opmnemonic udvexf30f29
2362 * @opcode 0x29
2363 * @oppfx 0xf3
2364 * @opunused vex.modrm
2365 * @opcpuid avx
2366 * @optest ->
2367 * @opdone
2368 */
2369
2370/**
2371 * @opmnemonic udvexf20f29
2372 * @opcode 0x29
2373 * @oppfx 0xf2
2374 * @opunused vex.modrm
2375 * @opcpuid avx
2376 * @optest ->
2377 * @opdone
2378 */
2379
2380
2381/** Opcode VEX.0F 0x2a - invalid */
2382/** Opcode VEX.66.0F 0x2a - invalid */
2383/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2384FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2385/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2386FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2387
2388
2389/**
2390 * @opcode 0x2b
2391 * @opcodesub !11 mr/reg
2392 * @oppfx none
2393 * @opcpuid avx
2394 * @opgroup og_avx_cachect
2395 * @opxcpttype 1
2396 * @optest op1=1 op2=2 -> op1=2
2397 * @optest op1=0 op2=-42 -> op1=-42
2398 * @note Identical implementation to vmovntpd
2399 */
2400FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2401{
2402 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2403 Assert(pVCpu->iem.s.uVexLength <= 1);
2404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2405 if (IEM_IS_MODRM_MEM_MODE(bRm))
2406 {
2407 /*
2408 * Memory, register.
2409 */
2410 if (pVCpu->iem.s.uVexLength == 0)
2411 {
2412 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2413 IEM_MC_LOCAL(RTUINT128U, uSrc);
2414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2415
2416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2417 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2418 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2419 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2420
2421 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2422 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2423
2424 IEM_MC_ADVANCE_RIP_AND_FINISH();
2425 IEM_MC_END();
2426 }
2427 else
2428 {
2429 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2430 IEM_MC_LOCAL(RTUINT256U, uSrc);
2431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2432
2433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2434 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2435 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2436 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2437
2438 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2439 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2440
2441 IEM_MC_ADVANCE_RIP_AND_FINISH();
2442 IEM_MC_END();
2443 }
2444 }
2445 /* The register, register encoding is invalid. */
2446 else
2447 IEMOP_RAISE_INVALID_OPCODE_RET();
2448}
2449
2450/**
2451 * @opcode 0x2b
2452 * @opcodesub !11 mr/reg
2453 * @oppfx 0x66
2454 * @opcpuid avx
2455 * @opgroup og_avx_cachect
2456 * @opxcpttype 1
2457 * @optest op1=1 op2=2 -> op1=2
2458 * @optest op1=0 op2=-42 -> op1=-42
2459 * @note Identical implementation to vmovntps
2460 */
2461FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2462{
2463 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2464 Assert(pVCpu->iem.s.uVexLength <= 1);
2465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2466 if (IEM_IS_MODRM_MEM_MODE(bRm))
2467 {
2468 /*
2469 * Memory, register.
2470 */
2471 if (pVCpu->iem.s.uVexLength == 0)
2472 {
2473 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2474 IEM_MC_LOCAL(RTUINT128U, uSrc);
2475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2476
2477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2478 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2479 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2480 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2481
2482 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2483 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2484
2485 IEM_MC_ADVANCE_RIP_AND_FINISH();
2486 IEM_MC_END();
2487 }
2488 else
2489 {
2490 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2491 IEM_MC_LOCAL(RTUINT256U, uSrc);
2492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2493
2494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2495 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2496 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2497 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2498
2499 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2500 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2501
2502 IEM_MC_ADVANCE_RIP_AND_FINISH();
2503 IEM_MC_END();
2504 }
2505 }
2506 /* The register, register encoding is invalid. */
2507 else
2508 IEMOP_RAISE_INVALID_OPCODE_RET();
2509}
2510
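/* Editor's note: "non-temporal" is only a cache hint; architecturally the
 * vmovntps/vmovntpd forms above are plain aligned stores, which is why their
 * bodies match the vmovaps/vmovapd store path (the hint is simply lost under
 * emulation).  Illustrative sketch: */
#if 0
static void sketchMovNtStore(uint8_t *pbDst, uint8_t const *pbSrc, size_t cbVec)
{
    for (size_t off = 0; off < cbVec; off++) /* same architectural effect as an aligned store */
        pbDst[off] = pbSrc[off];
}
#endif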
2511/**
2512 * @opmnemonic udvexf30f2b
2513 * @opcode 0x2b
2514 * @oppfx 0xf3
2515 * @opunused vex.modrm
2516 * @opcpuid avx
2517 * @optest ->
2518 * @opdone
2519 */
2520
2521/**
2522 * @opmnemonic udvexf20f2b
2523 * @opcode 0x2b
2524 * @oppfx 0xf2
2525 * @opunused vex.modrm
2526 * @opcpuid avx
2527 * @optest ->
2528 * @opdone
2529 */
2530
2531
2532/* Opcode VEX.0F 0x2c - invalid */
2533/* Opcode VEX.66.0F 0x2c - invalid */
2534/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2535FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2536/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2537FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2538
2539/* Opcode VEX.0F 0x2d - invalid */
2540/* Opcode VEX.66.0F 0x2d - invalid */
2541/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2542FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2543/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2544FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2545
2546
2547/**
2548 * @opcode 0x2e
2549 * @oppfx none
2550 * @opflmodify cf,pf,af,zf,sf,of
2551 * @opflclear af,sf,of
2552 */
2553FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2554{
2555 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2556 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2557 if (IEM_IS_MODRM_REG_MODE(bRm))
2558 {
2559 /*
2560 * Register, register.
2561 */
2562 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2563 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2564 IEM_MC_LOCAL(uint32_t, fEFlags);
2565 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2566 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2567 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2568 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2569 IEM_MC_PREPARE_AVX_USAGE();
2570 IEM_MC_FETCH_EFLAGS(fEFlags);
2571 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2572 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
2573 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2574 pEFlags, uSrc1, uSrc2);
2575 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2576 IEM_MC_COMMIT_EFLAGS(fEFlags);
2577
2578 IEM_MC_ADVANCE_RIP_AND_FINISH();
2579 IEM_MC_END();
2580 }
2581 else
2582 {
2583 /*
2584 * Register, memory.
2585 */
2586 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2587 IEM_MC_LOCAL(uint32_t, fEFlags);
2588 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2589 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2590 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2592
2593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2594 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2595 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2596 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2597
2598 IEM_MC_PREPARE_AVX_USAGE();
2599 IEM_MC_FETCH_EFLAGS(fEFlags);
2600 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2601 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2602 pEFlags, uSrc1, uSrc2);
2603 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2604 IEM_MC_COMMIT_EFLAGS(fEFlags);
2605
2606 IEM_MC_ADVANCE_RIP_AND_FINISH();
2607 IEM_MC_END();
2608 }
2609}
2610
2611
2612/**
2613 * @opcode 0x2e
2614 * @oppfx 0x66
2615 * @opflmodify cf,pf,af,zf,sf,of
2616 * @opflclear af,sf,of
2617 */
2618FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2619{
2620 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2622 if (IEM_IS_MODRM_REG_MODE(bRm))
2623 {
2624 /*
2625 * Register, register.
2626 */
2627 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2628 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2629 IEM_MC_LOCAL(uint32_t, fEFlags);
2630 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2631 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2632 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2633 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2634 IEM_MC_PREPARE_AVX_USAGE();
2635 IEM_MC_FETCH_EFLAGS(fEFlags);
2636 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2637 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
2638 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2639 pEFlags, uSrc1, uSrc2);
2640 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2641 IEM_MC_COMMIT_EFLAGS(fEFlags);
2642
2643 IEM_MC_ADVANCE_RIP_AND_FINISH();
2644 IEM_MC_END();
2645 }
2646 else
2647 {
2648 /*
2649 * Register, memory.
2650 */
2651 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2652 IEM_MC_LOCAL(uint32_t, fEFlags);
2653 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2654 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2655 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2657
2658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2659 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2660 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2661 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2662
2663 IEM_MC_PREPARE_AVX_USAGE();
2664 IEM_MC_FETCH_EFLAGS(fEFlags);
2665 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2666 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2667 pEFlags, uSrc1, uSrc2);
2668 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2669 IEM_MC_COMMIT_EFLAGS(fEFlags);
2670
2671 IEM_MC_ADVANCE_RIP_AND_FINISH();
2672 IEM_MC_END();
2673 }
2674}
2675
2676
2677/* Opcode VEX.F3.0F 0x2e - invalid */
2678/* Opcode VEX.F2.0F 0x2e - invalid */
2679
2680/**
2681 * @opcode 0x2f
2682 * @oppfx none
2683 * @opflmodify cf,pf,af,zf,sf,of
2684 * @opflclear af,sf,of
2685 */
2686FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2687{
2688 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2690 if (IEM_IS_MODRM_REG_MODE(bRm))
2691 {
2692 /*
2693 * Register, register.
2694 */
2695 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2696 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2697 IEM_MC_LOCAL(uint32_t, fEFlags);
2698 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2699 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2700 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2701 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2702 IEM_MC_PREPARE_AVX_USAGE();
2703 IEM_MC_FETCH_EFLAGS(fEFlags);
2704 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2705 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
2706 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2707 pEFlags, uSrc1, uSrc2);
2708 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2709 IEM_MC_COMMIT_EFLAGS(fEFlags);
2710
2711 IEM_MC_ADVANCE_RIP_AND_FINISH();
2712 IEM_MC_END();
2713 }
2714 else
2715 {
2716 /*
2717 * Register, memory.
2718 */
2719 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2720 IEM_MC_LOCAL(uint32_t, fEFlags);
2721 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2722 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2723 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2725
2726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2727 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2728 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2729 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2730
2731 IEM_MC_PREPARE_AVX_USAGE();
2732 IEM_MC_FETCH_EFLAGS(fEFlags);
2733 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2734 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2735 pEFlags, uSrc1, uSrc2);
2736 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2737 IEM_MC_COMMIT_EFLAGS(fEFlags);
2738
2739 IEM_MC_ADVANCE_RIP_AND_FINISH();
2740 IEM_MC_END();
2741 }
2742}
2743
2744
2745/**
2746 * @opcode 0x2f
2747 * @oppfx 0x66
2748 * @opflmodify cf,pf,af,zf,sf,of
2749 * @opflclear af,sf,of
2750 */
2751FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2752{
2753 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2754 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2755 if (IEM_IS_MODRM_REG_MODE(bRm))
2756 {
2757 /*
2758 * Register, register.
2759 */
2760 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2761 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2762 IEM_MC_LOCAL(uint32_t, fEFlags);
2763 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2764 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2765 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2766 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2767 IEM_MC_PREPARE_AVX_USAGE();
2768 IEM_MC_FETCH_EFLAGS(fEFlags);
2769 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2770 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
2771 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2772 pEFlags, uSrc1, uSrc2);
2773 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2774 IEM_MC_COMMIT_EFLAGS(fEFlags);
2775
2776 IEM_MC_ADVANCE_RIP_AND_FINISH();
2777 IEM_MC_END();
2778 }
2779 else
2780 {
2781 /*
2782 * Register, memory.
2783 */
2784 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2785 IEM_MC_LOCAL(uint32_t, fEFlags);
2786 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2787 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2788 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2789 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2790
2791 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2792 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2793 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2794 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2795
2796 IEM_MC_PREPARE_AVX_USAGE();
2797 IEM_MC_FETCH_EFLAGS(fEFlags);
2798 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2799 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2800 pEFlags, uSrc1, uSrc2);
2801 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2802 IEM_MC_COMMIT_EFLAGS(fEFlags);
2803
2804 IEM_MC_ADVANCE_RIP_AND_FINISH();
2805 IEM_MC_END();
2806 }
2807}
2808
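/* Editor's sketch (illustrative only): the EFLAGS encoding produced by the
 * iemAImpl_v(u)comiss/v(u)comisd workers called above.  ZF/PF/CF carry the
 * compare result while AF/SF/OF are cleared, matching the @opflmodify and
 * @opflclear annotations; vcomiss/vcomisd differ from the ucomi forms only in
 * signalling #IA on quiet NaNs as well. */
#if 0
static uint32_t sketchComiEFlags(double rSrc1, double rSrc2, uint32_t fEFlags)
{
    fEFlags &= ~(uint32_t)(X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF | X86_EFL_AF | X86_EFL_SF | X86_EFL_OF);
    if (rSrc1 != rSrc1 || rSrc2 != rSrc2)       /* unordered (NaN operand) */
        fEFlags |= X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;
    else if (rSrc1 < rSrc2)
        fEFlags |= X86_EFL_CF;
    else if (rSrc1 == rSrc2)
        fEFlags |= X86_EFL_ZF;
    return fEFlags;                             /* greater-than: all three clear */
}
#endif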
2809
2810/* Opcode VEX.F3.0F 0x2f - invalid */
2811/* Opcode VEX.F2.0F 0x2f - invalid */
2812
2813/* Opcode VEX.0F 0x30 - invalid */
2814/* Opcode VEX.0F 0x31 - invalid */
2815/* Opcode VEX.0F 0x32 - invalid */
2816/* Opcode VEX.0F 0x33 - invalid */
2817/* Opcode VEX.0F 0x34 - invalid */
2818/* Opcode VEX.0F 0x35 - invalid */
2819/* Opcode VEX.0F 0x36 - invalid */
2820/* Opcode VEX.0F 0x37 - invalid */
2821/* Opcode VEX.0F 0x38 - invalid */
2822/* Opcode VEX.0F 0x39 - invalid */
2823/* Opcode VEX.0F 0x3a - invalid */
2824/* Opcode VEX.0F 0x3b - invalid */
2825/* Opcode VEX.0F 0x3c - invalid */
2826/* Opcode VEX.0F 0x3d - invalid */
2827/* Opcode VEX.0F 0x3e - invalid */
2828/* Opcode VEX.0F 0x3f - invalid */
2829/* Opcode VEX.0F 0x40 - invalid */
2830/* Opcode VEX.0F 0x41 - invalid */
2831/* Opcode VEX.0F 0x42 - invalid */
2832/* Opcode VEX.0F 0x43 - invalid */
2833/* Opcode VEX.0F 0x44 - invalid */
2834/* Opcode VEX.0F 0x45 - invalid */
2835/* Opcode VEX.0F 0x46 - invalid */
2836/* Opcode VEX.0F 0x47 - invalid */
2837/* Opcode VEX.0F 0x48 - invalid */
2838/* Opcode VEX.0F 0x49 - invalid */
2839/* Opcode VEX.0F 0x4a - invalid */
2840/* Opcode VEX.0F 0x4b - invalid */
2841/* Opcode VEX.0F 0x4c - invalid */
2842/* Opcode VEX.0F 0x4d - invalid */
2843/* Opcode VEX.0F 0x4e - invalid */
2844/* Opcode VEX.0F 0x4f - invalid */
2845
2846
2847/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2848FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2849{
2850 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2851 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2852 if (IEM_IS_MODRM_REG_MODE(bRm))
2853 {
2854 /*
2855 * Register, register.
2856 */
2857 if (pVCpu->iem.s.uVexLength == 0)
2858 {
2859 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2860 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2861 IEM_MC_LOCAL(uint8_t, u8Dst);
2862 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2863 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2864 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2865 IEM_MC_PREPARE_AVX_USAGE();
2866 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2867 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2868 pu8Dst, puSrc);
2869 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2870 IEM_MC_ADVANCE_RIP_AND_FINISH();
2871 IEM_MC_END();
2872 }
2873 else
2874 {
2875 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2876 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2877 IEM_MC_LOCAL(uint8_t, u8Dst);
2878 IEM_MC_LOCAL(RTUINT256U, uSrc);
2879 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2880 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2881
2882 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2883 IEM_MC_PREPARE_AVX_USAGE();
2884 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2885 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2886 pu8Dst, puSrc);
2887 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2888 IEM_MC_ADVANCE_RIP_AND_FINISH();
2889 IEM_MC_END();
2890 }
2891 }
2892 /* No memory operand. */
2893 else
2894 IEMOP_RAISE_INVALID_OPCODE_RET();
2895}
2896
2897
2898/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2899FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2900{
2901 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2902 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2903 if (IEM_IS_MODRM_REG_MODE(bRm))
2904 {
2905 /*
2906 * Register, register.
2907 */
2908 if (pVCpu->iem.s.uVexLength == 0)
2909 {
2910 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2911 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2912 IEM_MC_LOCAL(uint8_t, u8Dst);
2913 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2914 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2915 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2916 IEM_MC_PREPARE_AVX_USAGE();
2917 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2918 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2919 pu8Dst, puSrc);
2920 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2921 IEM_MC_ADVANCE_RIP_AND_FINISH();
2922 IEM_MC_END();
2923 }
2924 else
2925 {
2926 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2927 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2928 IEM_MC_LOCAL(uint8_t, u8Dst);
2929 IEM_MC_LOCAL(RTUINT256U, uSrc);
2930 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2931 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2932
2933 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2934 IEM_MC_PREPARE_AVX_USAGE();
2935 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2936 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2937 pu8Dst, puSrc);
2938 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2939 IEM_MC_ADVANCE_RIP_AND_FINISH();
2940 IEM_MC_END();
2941 }
2942 }
2943 /* No memory operand. */
2944 else
2945 IEMOP_RAISE_INVALID_OPCODE_RET();
2946}
2947
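/* Editor's sketch (illustrative only): the workers above gather one sign bit
 * per packed element into the low bits of a general register (4 or 8 bits for
 * ps, 2 or 4 for pd), zero-extending the rest. */
#if 0
static uint8_t sketchMovMskPs128(uint32_t const auSrc[4])
{
    uint8_t bMask = 0;
    for (unsigned iLane = 0; iLane < 4; iLane++)
        bMask |= (uint8_t)((auSrc[iLane] >> 31) << iLane);
    return bMask;
}
#endif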
2948
2949/* Opcode VEX.F3.0F 0x50 - invalid */
2950/* Opcode VEX.F2.0F 0x50 - invalid */
2951
2952/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2953FNIEMOP_DEF(iemOp_vsqrtps_Vps_Wps)
2954{
2955 IEMOP_MNEMONIC2(VEX_RM, VSQRTPS, vsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2956 IEMOPMEDIAF2_INIT_VARS( vsqrtps);
2957 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
2958}
2959
2960
2961/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2962FNIEMOP_DEF(iemOp_vsqrtpd_Vpd_Wpd)
2963{
2964 IEMOP_MNEMONIC2(VEX_RM, VSQRTPD, vsqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2965 IEMOPMEDIAF2_INIT_VARS( vsqrtpd);
2966 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
2967}
2968
2969
2970/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2971FNIEMOP_DEF(iemOp_vsqrtss_Vss_Hss_Wss)
2972{
2973 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSS, vsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2974 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
2975 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtss_u128_r32, iemAImpl_vsqrtss_u128_r32_fallback));
2976}
2977
2978
2979/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2980FNIEMOP_DEF(iemOp_vsqrtsd_Vsd_Hsd_Wsd)
2981{
2982 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSD, vsqrtsd, Vps, Hps, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2983 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
2984 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtsd_u128_r64, iemAImpl_vsqrtsd_u128_r64_fallback));
2985}
2986
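/* Editor's sketch (illustrative only): all the VEX scalar forms in this range
 * (vsqrtss/sd, vrsqrtss, vrcpss, vaddss/sd, ... vmaxss/sd) share the merge
 * behaviour implemented by the iemOpCommonAvx_Vx_Hx_R32/R64 workers: lane 0
 * holds the scalar result, the remaining lanes pass through from the VEX.vvvv
 * operand, and bits 255:128 of the destination are zeroed. */
#if 0
static void sketchScalarMergeSs(float aDst[4], float const aHsrc[4], float r32Result)
{
    aDst[0] = r32Result;    /* e.g. sqrtf(w0) for vsqrtss, h0 + w0 for vaddss */
    aDst[1] = aHsrc[1];
    aDst[2] = aHsrc[2];
    aDst[3] = aHsrc[3];
}
#endif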
2987
2988/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2989FNIEMOP_DEF(iemOp_vrsqrtps_Vps_Wps)
2990{
2991 IEMOP_MNEMONIC2(VEX_RM, VRSQRTPS, vrsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2992 IEMOPMEDIAF2_INIT_VARS( vrsqrtps);
2993 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
2994}
2995
2996
2997/* Opcode VEX.66.0F 0x52 - invalid */
2998
2999
3000/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
3001FNIEMOP_DEF(iemOp_vrsqrtss_Vss_Hss_Wss)
3002{
3003 IEMOP_MNEMONIC3(VEX_RVM, VRSQRTSS, vrsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3004 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3005 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vrsqrtss_u128_r32, iemAImpl_vrsqrtss_u128_r32_fallback));
3006}
3007
3008
3009/* Opcode VEX.F2.0F 0x52 - invalid */
3010
3011
3012/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
3013FNIEMOP_DEF(iemOp_vrcpps_Vps_Wps)
3014{
3015 IEMOP_MNEMONIC2(VEX_RM, VRCPPS, vrcpps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3016 IEMOPMEDIAF2_INIT_VARS( vrcpps);
3017 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3018}
3019
3020
3021/* Opcode VEX.66.0F 0x53 - invalid */
3022
3023
3024/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
3025FNIEMOP_DEF(iemOp_vrcpss_Vss_Hss_Wss)
3026{
3027 IEMOP_MNEMONIC3(VEX_RVM, VRCPSS, vrcpss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3028 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3029 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vrcpss_u128_r32, iemAImpl_vrcpss_u128_r32_fallback));
3030}
3031
3032
3033/* Opcode VEX.F2.0F 0x53 - invalid */
3034
3035
3036/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
3037FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
3038{
3039 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3040 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3041 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3042}
3043
3044
3045/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
3046FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
3047{
3048 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3049 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3050 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3051}
3052
3053
3054/* Opcode VEX.F3.0F 0x54 - invalid */
3055/* Opcode VEX.F2.0F 0x54 - invalid */
3056
3057
3058/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
3059FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
3060{
3061 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3062 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3063 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3064}
3065
3066
3067/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
3068FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
3069{
3070 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3071 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3072 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3073}
3074
3075
3076/* Opcode VEX.F3.0F 0x55 - invalid */
3077/* Opcode VEX.F2.0F 0x55 - invalid */
3078
3079/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
3080FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
3081{
3082 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3083 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3084 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3085}
3086
3087
3088/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
3089FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
3090{
3091 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3092 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3093 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3094}
3095
3096
3097/* Opcode VEX.F3.0F 0x56 - invalid */
3098/* Opcode VEX.F2.0F 0x56 - invalid */
3099
3100
3101/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
3102FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
3103{
3104 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3105 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3106 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3107}
3108
3109
3110/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
3111FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
3112{
3113 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3114 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3115 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3116}
3117
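/* Editor's note: the ps/pd bitwise forms above (0x54..0x57) deliberately
 * dispatch to the integer vpand/vpandn/vpor/vpxor implementations - a bitwise
 * operation is lane-type agnostic.  Per-quadword sketch (illustrative only;
 * note vandnps complements its first, i.e. VEX.vvvv, operand): */
#if 0
static uint64_t sketchAndPs (uint64_t uSrc1, uint64_t uSrc2) { return uSrc1 & uSrc2; }
static uint64_t sketchAndnPs(uint64_t uSrc1, uint64_t uSrc2) { return ~uSrc1 & uSrc2; }
static uint64_t sketchOrPs  (uint64_t uSrc1, uint64_t uSrc2) { return uSrc1 | uSrc2; }
static uint64_t sketchXorPs (uint64_t uSrc1, uint64_t uSrc2) { return uSrc1 ^ uSrc2; }
#endif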
3118
3119/* Opcode VEX.F3.0F 0x57 - invalid */
3120/* Opcode VEX.F2.0F 0x57 - invalid */
3121
3122
3123/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
3124FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
3125{
3126 IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3127 IEMOPMEDIAF3_INIT_VARS( vaddps);
3128 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3129}
3130
3131
3132/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
3133FNIEMOP_DEF(iemOp_vaddpd_Vpd_Hpd_Wpd)
3134{
3135 IEMOP_MNEMONIC3(VEX_RVM, VADDPD, vaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3136 IEMOPMEDIAF3_INIT_VARS( vaddpd);
3137 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3138}
3139
3140
3141/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
3142FNIEMOP_DEF(iemOp_vaddss_Vss_Hss_Wss)
3143{
3144 IEMOP_MNEMONIC3(VEX_RVM, VADDSS, vaddss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3145 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3146 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback));
3147}
3148
3149
3150/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
3151FNIEMOP_DEF(iemOp_vaddsd_Vsd_Hsd_Wsd)
3152{
3153 IEMOP_MNEMONIC3(VEX_RVM, VADDSD, vaddsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3154 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3155 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback));
3156}
3157
3158
3159/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
3160FNIEMOP_DEF(iemOp_vmulps_Vps_Hps_Wps)
3161{
3162 IEMOP_MNEMONIC3(VEX_RVM, VMULPS, vmulps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3163 IEMOPMEDIAF3_INIT_VARS( vmulps);
3164 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3165}
3166
3167
3168/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
3169FNIEMOP_DEF(iemOp_vmulpd_Vpd_Hpd_Wpd)
3170{
3171 IEMOP_MNEMONIC3(VEX_RVM, VMULPD, vmulpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3172 IEMOPMEDIAF3_INIT_VARS( vmulpd);
3173 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3174}
3175
3176
3177/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
3178FNIEMOP_DEF(iemOp_vmulss_Vss_Hss_Wss)
3179{
3180 IEMOP_MNEMONIC3(VEX_RVM, VMULSS, vmulss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3181 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3182 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulss_u128_r32, iemAImpl_vmulss_u128_r32_fallback));
3183}
3184
3185
3186/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
3187FNIEMOP_DEF(iemOp_vmulsd_Vsd_Hsd_Wsd)
3188{
3189 IEMOP_MNEMONIC3(VEX_RVM, VMULSD, vmulsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3190 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3191 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulsd_u128_r64, iemAImpl_vmulsd_u128_r64_fallback));
3192}
3193
3194
3195/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
3196FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
3197/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
3198FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
3199/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
3200FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
3201/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
3202FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
3203
3204/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
3205FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
3206/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
3207FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
3208/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
3209FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
3210/* Opcode VEX.F2.0F 0x5b - invalid */
3211
3212
3213/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
3214FNIEMOP_DEF(iemOp_vsubps_Vps_Hps_Wps)
3215{
3216 IEMOP_MNEMONIC3(VEX_RVM, VSUBPS, vsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3217 IEMOPMEDIAF3_INIT_VARS( vsubps);
3218 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3219}
3220
3221
3222/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
3223FNIEMOP_DEF(iemOp_vsubpd_Vpd_Hpd_Wpd)
3224{
3225 IEMOP_MNEMONIC3(VEX_RVM, VSUBPD, vsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3226 IEMOPMEDIAF3_INIT_VARS( vsubpd);
3227 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3228}
3229
3230
3231/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
3232FNIEMOP_DEF(iemOp_vsubss_Vss_Hss_Wss)
3233{
3234 IEMOP_MNEMONIC3(VEX_RVM, VSUBSS, vsubss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3235 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3236 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubss_u128_r32, iemAImpl_vsubss_u128_r32_fallback));
3237}
3238
3239
3240/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
3241FNIEMOP_DEF(iemOp_vsubsd_Vsd_Hsd_Wsd)
3242{
3243 IEMOP_MNEMONIC3(VEX_RVM, VSUBSD, vsubsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3244 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3245 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubsd_u128_r64, iemAImpl_vsubsd_u128_r64_fallback));
3246}
3247
3248
3249/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
3250FNIEMOP_DEF(iemOp_vminps_Vps_Hps_Wps)
3251{
3252 IEMOP_MNEMONIC3(VEX_RVM, VMINPS, vminps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3253 IEMOPMEDIAF3_INIT_VARS( vminps);
3254 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3255}
3256
3257
3258/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
3259FNIEMOP_DEF(iemOp_vminpd_Vpd_Hpd_Wpd)
3260{
3261 IEMOP_MNEMONIC3(VEX_RVM, VMINPD, vminpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3262 IEMOPMEDIAF3_INIT_VARS( vminpd);
3263 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3264}
3265
3266
3267/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
3268FNIEMOP_DEF(iemOp_vminss_Vss_Hss_Wss)
3269{
3270 IEMOP_MNEMONIC3(VEX_RVM, VMINSS, vminss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3271 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3272 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminss_u128_r32, iemAImpl_vminss_u128_r32_fallback));
3273}
3274
3275
3276/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
3277FNIEMOP_DEF(iemOp_vminsd_Vsd_Hsd_Wsd)
3278{
3279 IEMOP_MNEMONIC3(VEX_RVM, VMINSD, vminsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3280 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3281 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminsd_u128_r64, iemAImpl_vminsd_u128_r64_fallback));
3282}
3283
3284
3285/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
3286FNIEMOP_DEF(iemOp_vdivps_Vps_Hps_Wps)
3287{
3288 IEMOP_MNEMONIC3(VEX_RVM, VDIVPS, vdivps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3289 IEMOPMEDIAF3_INIT_VARS( vdivps);
3290 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3291}
3292
3293
3294/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
3295FNIEMOP_DEF(iemOp_vdivpd_Vpd_Hpd_Wpd)
3296{
3297 IEMOP_MNEMONIC3(VEX_RVM, VDIVPD, vdivpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3298 IEMOPMEDIAF3_INIT_VARS( vdivpd);
3299 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3300}
3301
3302
3303/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
3304FNIEMOP_DEF(iemOp_vdivss_Vss_Hss_Wss)
3305{
3306 IEMOP_MNEMONIC3(VEX_RVM, VDIVSS, vdivss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3307 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3308 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivss_u128_r32, iemAImpl_vdivss_u128_r32_fallback));
3309}
3310
3311
3312/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
3313FNIEMOP_DEF(iemOp_vdivsd_Vsd_Hsd_Wsd)
3314{
3315 IEMOP_MNEMONIC3(VEX_RVM, VDIVSD, vdivsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3316 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3317 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivsd_u128_r64, iemAImpl_vdivsd_u128_r64_fallback));
3318}
3319
3320
3321/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
3322FNIEMOP_DEF(iemOp_vmaxps_Vps_Hps_Wps)
3323{
3324 IEMOP_MNEMONIC3(VEX_RVM, VMAXPS, vmaxps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3325 IEMOPMEDIAF3_INIT_VARS( vmaxps);
3326 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3327}
3328
3329
3330/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
3331FNIEMOP_DEF(iemOp_vmaxpd_Vpd_Hpd_Wpd)
3332{
3333 IEMOP_MNEMONIC3(VEX_RVM, VMAXPD, vmaxpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3334 IEMOPMEDIAF3_INIT_VARS( vmaxpd);
3335 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3336}
3337
3338
3339/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
3340FNIEMOP_DEF(iemOp_vmaxss_Vss_Hss_Wss)
3341{
3342 IEMOP_MNEMONIC3(VEX_RVM, VMAXSS, vmaxss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3343 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3344 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxss_u128_r32, iemAImpl_vmaxss_u128_r32_fallback));
3345}
3346
3347
3348/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
3349FNIEMOP_DEF(iemOp_vmaxsd_Vsd_Hsd_Wsd)
3350{
3351 IEMOP_MNEMONIC3(VEX_RVM, VMAXSD, vmaxsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3352 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3353 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxsd_u128_r64, iemAImpl_vmaxsd_u128_r64_fallback));
3354}
3355
3356
3357/* Opcode VEX.0F 0x60 - invalid */
3358
3359
3360/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
3361FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
3362{
3363 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3364 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
3365 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3366}
3367
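/*
 * Sketch of what the low-unpack family does per 128-bit lane (this is the
 * architectural definition, not the actual helper code):
 *
 *      for (unsigned i = 0; i < 8; i++) // byte elements; the wd/dq forms are wider
 *      {
 *          puDst->au8[2 * i]     = puSrc1->au8[i];
 *          puDst->au8[2 * i + 1] = puSrc2->au8[i];
 *      }
 *
 * The 256-bit variants repeat this independently in each 128-bit lane, which
 * is why these dispatch to the dedicated "_LowSrc" worker.
 */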
3368
3369/* Opcode VEX.F3.0F 0x60 - invalid */
3370
3371
3372/* Opcode VEX.0F 0x61 - invalid */
3373
3374
3375/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
3376FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
3377{
3378 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3379 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
3380 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3381}
3382
3383
3384/* Opcode VEX.F3.0F 0x61 - invalid */
3385
3386
3387/* Opcode VEX.0F 0x62 - invalid */
3388
3389/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
3390FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
3391{
3392 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3393 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
3394 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3395}
3396
3397
3398/* Opcode VEX.F3.0F 0x62 - invalid */
3399
3400
3401
3402/* Opcode VEX.0F 0x63 - invalid */
3403
3404
3405/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
3406FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
3407{
3408 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3409 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
3410 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3411}
3412
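/*
 * The pack instructions narrow with saturation; a sketch of the clamping
 * vpacksswb applies to every source word (helper name hypothetical):
 *
 *      static int8_t SatI16ToI8(int16_t iSrc)
 *      {
 *          return iSrc < INT8_MIN ? INT8_MIN : iSrc > INT8_MAX ? INT8_MAX : (int8_t)iSrc;
 *      }
 *
 * vpackuswb (0x67 below) clamps the same signed words to [0,255] instead.
 */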
3413
3414/* Opcode VEX.F3.0F 0x63 - invalid */
3415
3416/* Opcode VEX.0F 0x64 - invalid */
3417
3418
3419/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
3420FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
3421{
3422 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3423 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtb);
3424 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3425}
3426
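/*
 * The vpcmpgt and vpcmpeq families produce element-wide masks instead of
 * setting flags; per byte element the GT forms do, roughly:
 *
 *      puDst->au8[i] = (int8_t)puSrc1->au8[i] > (int8_t)puSrc2->au8[i]
 *                    ? UINT8_C(0xff) : UINT8_C(0x00);
 *
 * The compare is signed for GT; the EQ forms at 0x74..0x76 compare raw bit
 * patterns, so signedness does not matter there.
 */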
3427
3428/* Opcode VEX.F3.0F 0x64 - invalid */
3429
3430/* Opcode VEX.0F 0x65 - invalid */
3431
3432
3433/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
3434FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
3435{
3436 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3437 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtw);
3438 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3439}
3440
3441
3442/* Opcode VEX.F3.0F 0x65 - invalid */
3443
3444/* Opcode VEX.0F 0x66 - invalid */
3445
3446
3447/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3448FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3449{
3450 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3451 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtd);
3452 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3453}
3454
3455
3456/* Opcode VEX.F3.0F 0x66 - invalid */
3457
3458/* Opcode VEX.0F 0x67 - invalid */
3459
3460
3461/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
3462FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3463{
3464 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3465 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3466 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3467}
3468
3469
3470/* Opcode VEX.F3.0F 0x67 - invalid */
3471
3472
3473///**
3474// * Common worker for SSE2 instructions on the form:
3475// * pxxxx xmm1, xmm2/mem128
3476// *
3477// * The 2nd operand is the second half of a register, which in the memory case
3478// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3479// * where it may read the full 128 bits or only the upper 64 bits.
3480// *
3481// * Exceptions type 4.
3482// */
3483//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3484//{
3485// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3486// if (IEM_IS_MODRM_REG_MODE(bRm))
3487// {
3488// /*
3489// * Register, register.
3490// */
3491// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3492// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3493// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3494// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3495// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3496// IEM_MC_PREPARE_SSE_USAGE();
3497// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3498// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3499// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3500// IEM_MC_ADVANCE_RIP_AND_FINISH();
3501// IEM_MC_END();
3502// }
3503// else
3504// {
3505// /*
3506// * Register, memory.
3507// */
3508// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3509// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3510// IEM_MC_LOCAL(RTUINT128U, uSrc);
3511// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3512// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3513//
3514// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3515// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3516// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3517// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
3518//
3519// IEM_MC_PREPARE_SSE_USAGE();
3520// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3521// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3522//
3523// IEM_MC_ADVANCE_RIP_AND_FINISH();
3524// IEM_MC_END();
3525// }
3526// return VINF_SUCCESS;
3527//}
3528
3529
3530/* Opcode VEX.0F 0x68 - invalid */
3531
3532/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3533FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3534{
3535 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3536 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3537 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3538}
3539
3540
3541/* Opcode VEX.F3.0F 0x68 - invalid */
3542
3543
3544/* Opcode VEX.0F 0x69 - invalid */
3545
3546
3547/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3548FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3549{
3550 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3551 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3552 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3553}
3554
3555
3556/* Opcode VEX.F3.0F 0x69 - invalid */
3557
3558
3559/* Opcode VEX.0F 0x6a - invalid */
3560
3561
3562/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3563FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3564{
3565 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3566 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3567 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3568}
3569
3570
3571/* Opcode VEX.F3.0F 0x6a - invalid */
3572
3573
3574/* Opcode VEX.0F 0x6b - invalid */
3575
3576
3577/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3578FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3579{
3580 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3581 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3582 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3583}
3584
3585
3586/* Opcode VEX.F3.0F 0x6b - invalid */
3587
3588
3589/* Opcode VEX.0F 0x6c - invalid */
3590
3591
3592/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3593FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3594{
3595 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3596 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3597 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3598}
3599
3600
3601/* Opcode VEX.F3.0F 0x6c - invalid */
3602/* Opcode VEX.F2.0F 0x6c - invalid */
3603
3604
3605/* Opcode VEX.0F 0x6d - invalid */
3606
3607
3608/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
3609FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3610{
3611 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3612 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3613 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3614}
3615
3616
3617/* Opcode VEX.F3.0F 0x6d - invalid */
3618
3619
3620/* Opcode VEX.0F 0x6e - invalid */
3621
3622FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3623{
3624 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3625 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3626 {
3627 /**
3628 * @opcode 0x6e
3629 * @opcodesub rex.w=1
3630 * @oppfx 0x66
3631 * @opcpuid avx
3632 * @opgroup og_avx_simdint_datamov
3633 * @opxcpttype 5
3634 * @optest 64-bit / op1=1 op2=2 -> op1=2
3635 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3636 */
3637 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3638 if (IEM_IS_MODRM_REG_MODE(bRm))
3639 {
3640 /* XMM, greg64 */
3641 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3642 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3643 IEM_MC_LOCAL(uint64_t, u64Tmp);
3644
3645 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3646 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3647
3648 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3649 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3650
3651 IEM_MC_ADVANCE_RIP_AND_FINISH();
3652 IEM_MC_END();
3653 }
3654 else
3655 {
3656 /* XMM, [mem64] */
3657 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3659 IEM_MC_LOCAL(uint64_t, u64Tmp);
3660
3661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3662 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3663 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3664 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3665
3666 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3667 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3668
3669 IEM_MC_ADVANCE_RIP_AND_FINISH();
3670 IEM_MC_END();
3671 }
3672 }
3673 else
3674 {
3675 /**
3676 * @opdone
3677 * @opcode 0x6e
3678 * @opcodesub rex.w=0
3679 * @oppfx 0x66
3680 * @opcpuid avx
3681 * @opgroup og_avx_simdint_datamov
3682 * @opxcpttype 5
3683 * @opfunction iemOp_vmovd_q_Vy_Ey
3684 * @optest op1=1 op2=2 -> op1=2
3685 * @optest op1=0 op2=-42 -> op1=-42
3686 */
3687 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3688 if (IEM_IS_MODRM_REG_MODE(bRm))
3689 {
3690 /* XMM, greg32 */
3691 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3692 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3693 IEM_MC_LOCAL(uint32_t, u32Tmp);
3694
3695 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3696 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3697
3698 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3699 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3700
3701 IEM_MC_ADVANCE_RIP_AND_FINISH();
3702 IEM_MC_END();
3703 }
3704 else
3705 {
3706 /* XMM, [mem32] */
3707 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3709 IEM_MC_LOCAL(uint32_t, u32Tmp);
3710
3711 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3712 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3713 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3714 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3715
3716 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3717 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3718
3719 IEM_MC_ADVANCE_RIP_AND_FINISH();
3720 IEM_MC_END();
3721 }
3722 }
3723}
3724
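/*
 * Encoding refresher for the 0x6e forms handled above (illustrative only):
 *
 *      vmovd xmm0, eax     ; VEX.128.66.0F.W0 6E /r - 32-bit GPR or mem32
 *      vmovq xmm0, rax     ; VEX.128.66.0F.W1 6E /r - 64-bit mode only
 *
 * Both zero the destination register above the written element all the way
 * up to VLMAX, which is what the _ZX_VLMAX stores implement.
 */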
3725
3726/* Opcode VEX.F3.0F 0x6e - invalid */
3727
3728
3729/* Opcode VEX.0F 0x6f - invalid */
3730
3731/**
3732 * @opcode 0x6f
3733 * @oppfx 0x66
3734 * @opcpuid avx
3735 * @opgroup og_avx_simdint_datamove
3736 * @opxcpttype 1
3737 * @optest op1=1 op2=2 -> op1=2
3738 * @optest op1=0 op2=-42 -> op1=-42
3739 */
3740FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3741{
3742 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3743 Assert(pVCpu->iem.s.uVexLength <= 1);
3744 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3745 if (IEM_IS_MODRM_REG_MODE(bRm))
3746 {
3747 /*
3748 * Register, register.
3749 */
3750 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3751 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3752
3753 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3754 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3755 if (pVCpu->iem.s.uVexLength == 0)
3756 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3757 IEM_GET_MODRM_RM(pVCpu, bRm));
3758 else
3759 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3760 IEM_GET_MODRM_RM(pVCpu, bRm));
3761 IEM_MC_ADVANCE_RIP_AND_FINISH();
3762 IEM_MC_END();
3763 }
3764 else if (pVCpu->iem.s.uVexLength == 0)
3765 {
3766 /*
3767 * Register, memory128.
3768 */
3769 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3770 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3772
3773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3774 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3775 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3776 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3777
3778 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3779 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3780
3781 IEM_MC_ADVANCE_RIP_AND_FINISH();
3782 IEM_MC_END();
3783 }
3784 else
3785 {
3786 /*
3787 * Register, memory256.
3788 */
3789 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3790 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3792
3793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3794 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3795 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3796 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3797
3798 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3799 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3800
3801 IEM_MC_ADVANCE_RIP_AND_FINISH();
3802 IEM_MC_END();
3803 }
3804}
3805
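/*
 * Note the asymmetry with vmovdqu below: this 66-prefixed form uses the
 * aligned fetchers (IEM_MC_FETCH_MEM_U128_ALIGN_SSE / _U256_ALIGN_AVX) and
 * faults on misaligned operands, while the F3 form uses the _NO_AC variants
 * and accepts any address.
 */
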
3806/**
3807 * @opcode 0x6f
3808 * @oppfx 0xf3
3809 * @opcpuid avx
3810 * @opgroup og_avx_simdint_datamove
3811 * @opxcpttype 4UA
3812 * @optest op1=1 op2=2 -> op1=2
3813 * @optest op1=0 op2=-42 -> op1=-42
3814 */
3815FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3816{
3817 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3818 Assert(pVCpu->iem.s.uVexLength <= 1);
3819 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3820 if (IEM_IS_MODRM_REG_MODE(bRm))
3821 {
3822 /*
3823 * Register, register.
3824 */
3825 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3826 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3827
3828 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3829 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3830 if (pVCpu->iem.s.uVexLength == 0)
3831 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3832 IEM_GET_MODRM_RM(pVCpu, bRm));
3833 else
3834 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3835 IEM_GET_MODRM_RM(pVCpu, bRm));
3836 IEM_MC_ADVANCE_RIP_AND_FINISH();
3837 IEM_MC_END();
3838 }
3839 else if (pVCpu->iem.s.uVexLength == 0)
3840 {
3841 /*
3842 * Register, memory128.
3843 */
3844 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3845 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3847
3848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3849 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3850 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3851 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3852
3853 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3854 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3855
3856 IEM_MC_ADVANCE_RIP_AND_FINISH();
3857 IEM_MC_END();
3858 }
3859 else
3860 {
3861 /*
3862 * Register, memory256.
3863 */
3864 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3865 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3867
3868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3869 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3870 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3871 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3872
3873 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3874 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3875
3876 IEM_MC_ADVANCE_RIP_AND_FINISH();
3877 IEM_MC_END();
3878 }
3879}
3880
3881
3882/* Opcode VEX.0F 0x70 - invalid */
3883
3884
3885/**
3886 * Common worker for AVX/AVX2 instructions on the forms:
3887 * - vpxxx xmm0, xmm2/mem128, imm8
3888 * - vpxxx ymm0, ymm2/mem256, imm8
3889 *
3890 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3891 */
3892FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3893{
3894 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3895 if (IEM_IS_MODRM_REG_MODE(bRm))
3896 {
3897 /*
3898 * Register, register.
3899 */
3900 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3901 if (pVCpu->iem.s.uVexLength)
3902 {
3903 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3904 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3905 IEM_MC_LOCAL(RTUINT256U, uDst);
3906 IEM_MC_LOCAL(RTUINT256U, uSrc);
3907 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3908 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3909 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3910 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3911 IEM_MC_PREPARE_AVX_USAGE();
3912 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3913 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3914 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3915 IEM_MC_ADVANCE_RIP_AND_FINISH();
3916 IEM_MC_END();
3917 }
3918 else
3919 {
3920 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3921 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3922 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3923 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3924 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3925 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3926 IEM_MC_PREPARE_AVX_USAGE();
3927 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3928 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3929 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3930 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3931 IEM_MC_ADVANCE_RIP_AND_FINISH();
3932 IEM_MC_END();
3933 }
3934 }
3935 else
3936 {
3937 /*
3938 * Register, memory.
3939 */
3940 if (pVCpu->iem.s.uVexLength)
3941 {
3942 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3943 IEM_MC_LOCAL(RTUINT256U, uDst);
3944 IEM_MC_LOCAL(RTUINT256U, uSrc);
3945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3946 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3947 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3948
3949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3950 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3951 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3952 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3953 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3954 IEM_MC_PREPARE_AVX_USAGE();
3955
3956 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3957 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3958 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3959
3960 IEM_MC_ADVANCE_RIP_AND_FINISH();
3961 IEM_MC_END();
3962 }
3963 else
3964 {
3965 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3966 IEM_MC_LOCAL(RTUINT128U, uSrc);
3967 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3968 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3969 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3970
3971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3972 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3973 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3974 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3975 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3976 IEM_MC_PREPARE_AVX_USAGE();
3977
3978 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3979 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3980 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3981 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3982
3983 IEM_MC_ADVANCE_RIP_AND_FINISH();
3984 IEM_MC_END();
3985 }
3986 }
3987}
3988
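/*
 * For reference, the per-lane effect of the vpshufd immediate (a sketch of
 * the architectural definition, not the actual assembly helper):
 *
 *      for (unsigned i = 0; i < 4; i++)
 *          puDst->au32[i] = puSrc->au32[(bImm >> (i * 2)) & 3];
 *
 * vpshufhw and vpshuflw apply the same 2-bit selectors to only the high or
 * low four words of each lane and copy the other half through unchanged.
 */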
3989
3990/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3991FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3992{
3993 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3994 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3995 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3996}
3998
3999
4000/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
4001FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
4002{
4003 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4004 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
4005 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
4006}
4008
4009
4010/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
4011FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
4012{
4013 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4014 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
4015 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
4016}
4017
4018
4019/**
4020 * Common worker(s) for AVX/AVX2 instructions on the forms:
4021 * - vpxxx xmm0, xmm2, imm8
4022 * - vpxxx ymm0, ymm2, imm8
4023 *
4024 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
4025 */
4026FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
4027{
4028 if (IEM_IS_MODRM_REG_MODE(bRm))
4029 {
4030 /*
4031 * Register, register.
4032 */
4033 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4034 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4035 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
4036 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4037 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4038 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4039 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4040 IEM_MC_PREPARE_AVX_USAGE();
4041 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4042 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4043 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4044 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
4045 IEM_MC_ADVANCE_RIP_AND_FINISH();
4046 IEM_MC_END();
4047 }
4048 /* No memory operand. */
4049 else
4050 IEMOP_RAISE_INVALID_OPCODE_RET();
4051}
4052
4053FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
4054{
4055 if (IEM_IS_MODRM_REG_MODE(bRm))
4056 {
4057 /*
4058 * Register, register.
4059 */
4060 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4061 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4062 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
4063 IEM_MC_LOCAL(RTUINT256U, uDst);
4064 IEM_MC_LOCAL(RTUINT256U, uSrc);
4065 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4066 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4067 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4068 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4069 IEM_MC_PREPARE_AVX_USAGE();
4070 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4071 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4072 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
4073 IEM_MC_ADVANCE_RIP_AND_FINISH();
4074 IEM_MC_END();
4075 }
4076 /* No memory operand. */
4077 else
4078 IEMOP_RAISE_INVALID_OPCODE_RET();
4079}
4080
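/*
 * These VEX_VMI_REG shift-by-immediate forms are unusual in that the
 * destination is encoded in VEX.vvvv and the source in ModRM.rm, e.g.:
 *
 *      vpsrlw xmm2, xmm3, 5    ; xmm2 = xmm3 >> 5, word-wise; vvvv = xmm2
 *
 * Only the register encoding exists; a memory form decodes as invalid,
 * hence the IEMOP_RAISE_INVALID_OPCODE_RET path above.
 */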
4081
4082/* Opcode VEX.0F 0x71 11/2 - invalid. */
4083/** Opcode VEX.66.0F 0x71 11/2. */
4084FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
4085{
4086 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4087 if (pVCpu->iem.s.uVexLength)
4088 {
4089 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4090 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
4091 }
4092 else
4093 {
4094 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4095 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
4096 }
4097}
4098
4099
4100/* Opcode VEX.0F 0x71 11/4 - invalid */
4101/** Opcode VEX.66.0F 0x71 11/4. */
4102FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
4103{
4104 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4105 if (pVCpu->iem.s.uVexLength)
4106 {
4107 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4108 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
4109 }
4110 else
4111 {
4112 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4113 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
4114 }
4115}
4116
4117/* Opcode VEX.0F 0x71 11/6 - invalid */
4118
4119/** Opcode VEX.66.0F 0x71 11/6. */
4120FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
4121{
4122 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4123 if (pVCpu->iem.s.uVexLength)
4124 {
4125 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4126 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
4127 }
4128 else
4129 {
4130 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4131 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
4132 }
4133}
4134
4135
4136/**
4137 * VEX Group 12 jump table for register variant.
4138 */
4139IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
4140{
4141 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4142 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4143 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4144 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4145 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4146 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4147 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4148 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4149};
4150AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
4151
4152
4153/** Opcode VEX.0F 0x71. */
4154FNIEMOP_DEF(iemOp_VGrp12)
4155{
4156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4157 if (IEM_IS_MODRM_REG_MODE(bRm))
4158 /* register, register */
4159 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4160 + pVCpu->iem.s.idxPrefix], bRm);
4161 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4162}
4163
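/*
 * The VEX group tables here are indexed as reg * 4 + prefix, with idxPrefix
 * being 0 for no prefix, 1 for 0x66, 2 for 0xf3 and 3 for 0xf2.  E.g. for
 * VEX.66.0F 0x71 /2 (vpsrlw) the dispatcher above computes:
 *
 *      2 * 4 + 1 == 9  ->  second column of row /2 in g_apfnVexGroup12RegReg
 *
 * All other columns of that row hold iemOp_InvalidWithRMNeedImm8.
 */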
4164
4165/* Opcode VEX.0F 0x72 11/2 - invalid. */
4166/** Opcode VEX.66.0F 0x72 11/2. */
4167FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
4168{
4169 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4170 if (pVCpu->iem.s.uVexLength)
4171 {
4172 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4173 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
4174 }
4175 else
4176 {
4177 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4178 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
4179 }
4180}
4181
4182
4183/* Opcode VEX.0F 0x72 11/4 - invalid. */
4184/** Opcode VEX.66.0F 0x72 11/4. */
4185FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
4186{
4187 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4188 if (pVCpu->iem.s.uVexLength)
4189 {
4190 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4191 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
4192 }
4193 else
4194 {
4195 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4196 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
4197 }
4198}
4199
4200/* Opcode VEX.0F 0x72 11/6 - invalid. */
4201
4202/** Opcode VEX.66.0F 0x72 11/6. */
4203FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
4204{
4205 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4206 if (pVCpu->iem.s.uVexLength)
4207 {
4208 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4209 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
4210 }
4211 else
4212 {
4213 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4214 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
4215 }
4216}
4217
4218
4219/**
4220 * Group 13 jump table for register variant.
4221 */
4222IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
4223{
4224 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4225 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4226 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4227 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4228 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4229 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4230 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4231 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4232};
4233AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
4234
4235/** Opcode VEX.0F 0x72. */
4236FNIEMOP_DEF(iemOp_VGrp13)
4237{
4238 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4239 if (IEM_IS_MODRM_REG_MODE(bRm))
4240 /* register, register */
4241 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4242 + pVCpu->iem.s.idxPrefix], bRm);
4243 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4244}
4245
4246
4247/* Opcode VEX.0F 0x73 11/2 - invalid. */
4248/** Opcode VEX.66.0F 0x73 11/2. */
4249FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
4250{
4251 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4252 if (pVCpu->iem.s.uVexLength)
4253 {
4254 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4255 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
4256 }
4257 else
4258 {
4259 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4260 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
4261 }
4262}
4263
4264
4265/** Opcode VEX.66.0F 0x73 11/3. */
4266FNIEMOP_DEF_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm)
4267{
4268 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLDQ, vpsrldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4269 if (pVCpu->iem.s.uVexLength)
4270 {
4271 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4272 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u256, iemAImpl_vpsrldq_imm_u256_fallback));
4273 }
4274 else
4275 {
4276 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4277 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u128, iemAImpl_vpsrldq_imm_u128_fallback));
4278 }
4279}
4280
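/*
 * Unlike the element shifts, vpsrldq and vpslldq shift whole bytes; a
 * sketch of the per-lane rule (shift counts above 15 clear the lane):
 *
 *      unsigned const cbShift = RT_MIN(bImm, 16);
 *      uLane = uLane >> (cbShift * 8);     // vpsrldq; vpslldq shifts left
 *
 * The 256-bit forms once more treat each 128-bit lane independently.
 */
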
4281/* Opcode VEX.0F 0x73 11/6 - invalid. */
4282
4283/** Opcode VEX.66.0F 0x73 11/6. */
4284FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
4285{
4286 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4287 if (pVCpu->iem.s.uVexLength)
4288 {
4289 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4290 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
4291 }
4292 else
4293 {
4294 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4295 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
4296 }
4297}
4298
4299/** Opcode VEX.66.0F 0x73 11/7. */
4300FNIEMOP_DEF_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm)
4301{
4302 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLDQ, vpslldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4303 if (pVCpu->iem.s.uVexLength)
4304 {
4305 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4306 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u256, iemAImpl_vpslldq_imm_u256_fallback));
4307 }
4308 else
4309 {
4310 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4311 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u128, iemAImpl_vpslldq_imm_u128_fallback));
4312 }
4313}
4314
4315/* Opcode VEX.0F 0x73 11/7 - invalid. */
4316
4317/**
4318 * Group 14 jump table for register variant.
4319 */
4320IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
4321{
4322 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4323 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4324 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4325 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4326 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4327 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4328 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4329 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4330};
4331AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
4332
4333
4334/** Opcode VEX.0F 0x73. */
4335FNIEMOP_DEF(iemOp_VGrp14)
4336{
4337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4338 if (IEM_IS_MODRM_REG_MODE(bRm))
4339 /* register, register */
4340 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4341 + pVCpu->iem.s.idxPrefix], bRm);
4342 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4343}
4344
4345
4346/* Opcode VEX.0F 0x74 - invalid */
4347
4348
4349/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
4350FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
4351{
4352 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4353 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqb);
4354 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4355}
4356
4357/* Opcode VEX.F3.0F 0x74 - invalid */
4358/* Opcode VEX.F2.0F 0x74 - invalid */
4359
4360
4361/* Opcode VEX.0F 0x75 - invalid */
4362
4363
4364/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
4365FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
4366{
4367 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4368 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqw);
4369 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4370}
4371
4372
4373/* Opcode VEX.F3.0F 0x75 - invalid */
4374/* Opcode VEX.F2.0F 0x75 - invalid */
4375
4376
4377/* Opcode VEX.0F 0x76 - invalid */
4378
4379
4380/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
4381FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
4382{
4383 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4384 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqd);
4385 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4386}
4387
4388
4389/* Opcode VEX.F3.0F 0x76 - invalid */
4390/* Opcode VEX.F2.0F 0x76 - invalid */
4391
4392
4393/** Opcode VEX.0F 0x77 - vzeroupper (VEX.L=0) / vzeroall (VEX.L=1) */
4394FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
4395{
4396 Assert(pVCpu->iem.s.uVexLength <= 1);
4397 if (pVCpu->iem.s.uVexLength == 0)
4398 {
4399 /*
4400 * 128-bit: vzeroupper
4401 */
4402 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
4403 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4404
4405 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4406 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4407 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4408
4409 IEM_MC_CLEAR_YREG_128_UP(0);
4410 IEM_MC_CLEAR_YREG_128_UP(1);
4411 IEM_MC_CLEAR_YREG_128_UP(2);
4412 IEM_MC_CLEAR_YREG_128_UP(3);
4413 IEM_MC_CLEAR_YREG_128_UP(4);
4414 IEM_MC_CLEAR_YREG_128_UP(5);
4415 IEM_MC_CLEAR_YREG_128_UP(6);
4416 IEM_MC_CLEAR_YREG_128_UP(7);
4417
4418 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4419 {
4420 IEM_MC_CLEAR_YREG_128_UP( 8);
4421 IEM_MC_CLEAR_YREG_128_UP( 9);
4422 IEM_MC_CLEAR_YREG_128_UP(10);
4423 IEM_MC_CLEAR_YREG_128_UP(11);
4424 IEM_MC_CLEAR_YREG_128_UP(12);
4425 IEM_MC_CLEAR_YREG_128_UP(13);
4426 IEM_MC_CLEAR_YREG_128_UP(14);
4427 IEM_MC_CLEAR_YREG_128_UP(15);
4428 }
4429
4430 IEM_MC_ADVANCE_RIP_AND_FINISH();
4431 IEM_MC_END();
4432 }
4433 else
4434 {
4435 /*
4436 * 256-bit: vzeroall
4437 */
4438 IEMOP_MNEMONIC(vzeroall, "vzeroall");
4439 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4440
4441 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4442 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4443 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4444
4445 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
4446 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
4447 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
4448 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
4449 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
4450 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
4451 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
4452 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
4453 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
4454
4455 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4456 {
4457 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
4458 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
4459 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
4460 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
4461 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
4462 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
4463 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
4464 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
4465 }
4466
4467 IEM_MC_ADVANCE_RIP_AND_FINISH();
4468 IEM_MC_END();
4469 }
4470}
4471
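/*
 * Usage note: compilers emit vzeroupper when leaving AVX code precisely
 * because of the state handled above, e.g. a typical routine epilogue:
 *
 *      vzeroupper      ; drop the upper YMM halves to avoid AVX/SSE
 *      ret             ; transition penalties in legacy SSE callers
 *
 * Outside 64-bit mode only ymm0 thru ymm7 are addressable, which is why
 * both paths gate the second batch of registers on IEMMODE_64BIT.
 */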
4472
4473/* Opcode VEX.66.0F 0x77 - invalid */
4474/* Opcode VEX.F3.0F 0x77 - invalid */
4475/* Opcode VEX.F2.0F 0x77 - invalid */
4476
4477/* Opcode VEX.0F 0x78 - invalid */
4478/* Opcode VEX.66.0F 0x78 - invalid */
4479/* Opcode VEX.F3.0F 0x78 - invalid */
4480/* Opcode VEX.F2.0F 0x78 - invalid */
4481
4482/* Opcode VEX.0F 0x79 - invalid */
4483/* Opcode VEX.66.0F 0x79 - invalid */
4484/* Opcode VEX.F3.0F 0x79 - invalid */
4485/* Opcode VEX.F2.0F 0x79 - invalid */
4486
4487/* Opcode VEX.0F 0x7a - invalid */
4488/* Opcode VEX.66.0F 0x7a - invalid */
4489/* Opcode VEX.F3.0F 0x7a - invalid */
4490/* Opcode VEX.F2.0F 0x7a - invalid */
4491
4492/* Opcode VEX.0F 0x7b - invalid */
4493/* Opcode VEX.66.0F 0x7b - invalid */
4494/* Opcode VEX.F3.0F 0x7b - invalid */
4495/* Opcode VEX.F2.0F 0x7b - invalid */
4496
4497/* Opcode VEX.0F 0x7c - invalid */
4498
4499
4500/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
4501FNIEMOP_DEF(iemOp_vhaddpd_Vpd_Hpd_Wpd)
4502{
4503 IEMOP_MNEMONIC3(VEX_RVM, VHADDPD, vhaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4504 IEMOPMEDIAF3_INIT_VARS( vhaddpd);
4505 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4506}
4507
4508
4509/* Opcode VEX.F3.0F 0x7c - invalid */
4510
4511
4512/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
4513FNIEMOP_DEF(iemOp_vhaddps_Vps_Hps_Wps)
4514{
4515 IEMOP_MNEMONIC3(VEX_RVM, VHADDPS, vhaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4516 IEMOPMEDIAF3_INIT_VARS( vhaddps);
4517 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4518}
4519
4520
4521/* Opcode VEX.0F 0x7d - invalid */
4522
4523
4524/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
4525FNIEMOP_DEF(iemOp_vhsubpd_Vpd_Hpd_Wpd)
4526{
4527 IEMOP_MNEMONIC3(VEX_RVM, VHSUBPD, vhsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4528 IEMOPMEDIAF3_INIT_VARS( vhsubpd);
4529 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4530}
4531
4532
4533/* Opcode VEX.F3.0F 0x7d - invalid */
4534
4535
4536/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
4537FNIEMOP_DEF(iemOp_vhsubps_Vps_Hps_Wps)
4538{
4539 IEMOP_MNEMONIC3(VEX_RVM, VHSUBPS, vhsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4540 IEMOPMEDIAF3_INIT_VARS( vhsubps);
4541 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4542}
4543
4544
4545/* Opcode VEX.0F 0x7e - invalid */
4546
4547FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
4548{
4549 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4550 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4551 {
4552 /**
4553 * @opcode 0x7e
4554 * @opcodesub rex.w=1
4555 * @oppfx 0x66
4556 * @opcpuid avx
4557 * @opgroup og_avx_simdint_datamov
4558 * @opxcpttype 5
4559 * @optest 64-bit / op1=1 op2=2 -> op1=2
4560 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4561 */
4562 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4563 if (IEM_IS_MODRM_REG_MODE(bRm))
4564 {
4565 /* greg64, XMM */
4566 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4567 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4568 IEM_MC_LOCAL(uint64_t, u64Tmp);
4569
4570 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4571 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4572
4573 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4574 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
4575
4576 IEM_MC_ADVANCE_RIP_AND_FINISH();
4577 IEM_MC_END();
4578 }
4579 else
4580 {
4581 /* [mem64], XMM */
4582 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4584 IEM_MC_LOCAL(uint64_t, u64Tmp);
4585
4586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4587 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4588 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4589 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4590
4591 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4592 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4593
4594 IEM_MC_ADVANCE_RIP_AND_FINISH();
4595 IEM_MC_END();
4596 }
4597 }
4598 else
4599 {
4600 /**
4601 * @opdone
4602 * @opcode 0x7e
4603 * @opcodesub rex.w=0
4604 * @oppfx 0x66
4605 * @opcpuid avx
4606 * @opgroup og_avx_simdint_datamov
4607 * @opxcpttype 5
4608 * @opfunction iemOp_vmovd_q_Ey_Vy
4609 * @optest op1=1 op2=2 -> op1=2
4610 * @optest op1=0 op2=-42 -> op1=-42
4611 */
4612 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4613 if (IEM_IS_MODRM_REG_MODE(bRm))
4614 {
4615 /* greg32, XMM */
4616 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4617 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4618 IEM_MC_LOCAL(uint32_t, u32Tmp);
4619
4620 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4621 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4622
4623 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4624 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
4625
4626 IEM_MC_ADVANCE_RIP_AND_FINISH();
4627 IEM_MC_END();
4628 }
4629 else
4630 {
4631 /* [mem32], XMM */
4632 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4633 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4634 IEM_MC_LOCAL(uint32_t, u32Tmp);
4635
4636 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4637 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4638 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4639 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4640
4641 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4642 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4643
4644 IEM_MC_ADVANCE_RIP_AND_FINISH();
4645 IEM_MC_END();
4646 }
4647 }
4648}
4649
4650
4651/**
4652 * @opcode 0x7e
4653 * @oppfx 0xf3
4654 * @opcpuid avx
4655 * @opgroup og_avx_pcksclr_datamove
4656 * @opxcpttype none
4657 * @optest op1=1 op2=2 -> op1=2
4658 * @optest op1=0 op2=-42 -> op1=-42
4659 */
4660FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
4661{
4662 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4663 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4664 if (IEM_IS_MODRM_REG_MODE(bRm))
4665 {
4666 /*
4667 * Register, register.
4668 */
4669 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4670 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4671
4672 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4673 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4674
4675 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4676 IEM_GET_MODRM_RM(pVCpu, bRm));
4677 IEM_MC_ADVANCE_RIP_AND_FINISH();
4678 IEM_MC_END();
4679 }
4680 else
4681 {
4682 /*
4683 * Memory, register.
4684 */
4685 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4686 IEM_MC_LOCAL(uint64_t, uSrc);
4687 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4688
4689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4690 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4691 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4692 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4693
4694 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4695 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
4696
4697 IEM_MC_ADVANCE_RIP_AND_FINISH();
4698 IEM_MC_END();
4699 }
4700}
4701
4702/* Opcode VEX.F2.0F 0x7e - invalid */
4703
4704
4705/* Opcode VEX.0F 0x7f - invalid */
4706
4707/**
4708 * @opcode 0x7f
4709 * @oppfx 0x66
4710 * @opcpuid avx
4711 * @opgroup og_avx_simdint_datamove
4712 * @opxcpttype 1
4713 * @optest op1=1 op2=2 -> op1=2
4714 * @optest op1=0 op2=-42 -> op1=-42
4715 */
4716FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4717{
4718 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4719 Assert(pVCpu->iem.s.uVexLength <= 1);
4720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4721 if (IEM_IS_MODRM_REG_MODE(bRm))
4722 {
4723 /*
4724 * Register, register.
4725 */
4726 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4727 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4728
4729 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4730 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4731 if (pVCpu->iem.s.uVexLength == 0)
4732 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4733 IEM_GET_MODRM_REG(pVCpu, bRm));
4734 else
4735 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4736 IEM_GET_MODRM_REG(pVCpu, bRm));
4737 IEM_MC_ADVANCE_RIP_AND_FINISH();
4738 IEM_MC_END();
4739 }
4740 else if (pVCpu->iem.s.uVexLength == 0)
4741 {
4742 /*
4743 * Register, memory128.
4744 */
4745 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4746 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4748
4749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4750 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4751 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4752 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4753
4754 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4755 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4756
4757 IEM_MC_ADVANCE_RIP_AND_FINISH();
4758 IEM_MC_END();
4759 }
4760 else
4761 {
4762 /*
4763 * Register, memory256.
4764 */
4765 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4766 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4767 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4768
4769 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4770 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4771 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4772 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4773
4774 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4775 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4776
4777 IEM_MC_ADVANCE_RIP_AND_FINISH();
4778 IEM_MC_END();
4779 }
4780}
4781
4782
4783/**
4784 * @opcode 0x7f
4785 * @oppfx 0xf3
4786 * @opcpuid avx
4787 * @opgroup og_avx_simdint_datamove
4788 * @opxcpttype 4UA
4789 * @optest op1=1 op2=2 -> op1=2
4790 * @optest op1=0 op2=-42 -> op1=-42
4791 */
4792FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4793{
4794 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4795 Assert(pVCpu->iem.s.uVexLength <= 1);
4796 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4797 if (IEM_IS_MODRM_REG_MODE(bRm))
4798 {
4799 /*
4800 * Register, register.
4801 */
4802 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4803 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4804
4805 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4806 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4807 if (pVCpu->iem.s.uVexLength == 0)
4808 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4809 IEM_GET_MODRM_REG(pVCpu, bRm));
4810 else
4811 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4812 IEM_GET_MODRM_REG(pVCpu, bRm));
4813 IEM_MC_ADVANCE_RIP_AND_FINISH();
4814 IEM_MC_END();
4815 }
4816 else if (pVCpu->iem.s.uVexLength == 0)
4817 {
4818 /*
4819 * Register, memory128.
4820 */
4821 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4822 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4824
4825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4826 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4827 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4828 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4829
4830 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4831 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4832
4833 IEM_MC_ADVANCE_RIP_AND_FINISH();
4834 IEM_MC_END();
4835 }
4836 else
4837 {
4838 /*
4839 * Register, memory256.
4840 */
4841 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4842 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4843 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4844
4845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4846 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4847 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4848 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4849
4850 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4851 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4852
4853 IEM_MC_ADVANCE_RIP_AND_FINISH();
4854 IEM_MC_END();
4855 }
4856}
4857
4858/* Opcode VEX.F2.0F 0x7f - invalid */
4859
4860
4861/* Opcode VEX.0F 0x80 - invalid */
4862/* Opcode VEX.0F 0x81 - invalid */
4863/* Opcode VEX.0F 0x82 - invalid */
4864/* Opcode VEX.0F 0x83 - invalid */
4865/* Opcode VEX.0F 0x84 - invalid */
4866/* Opcode VEX.0F 0x85 - invalid */
4867/* Opcode VEX.0F 0x86 - invalid */
4868/* Opcode VEX.0F 0x87 - invalid */
4869/* Opcode VEX.0F 0x88 - invalid */
4870/* Opcode VEX.0F 0x89 - invalid */
4871/* Opcode VEX.0F 0x8a - invalid */
4872/* Opcode VEX.0F 0x8b - invalid */
4873/* Opcode VEX.0F 0x8c - invalid */
4874/* Opcode VEX.0F 0x8d - invalid */
4875/* Opcode VEX.0F 0x8e - invalid */
4876/* Opcode VEX.0F 0x8f - invalid */
4877/* Opcode VEX.0F 0x90 - invalid */
4878/* Opcode VEX.0F 0x91 - invalid */
4879/* Opcode VEX.0F 0x92 - invalid */
4880/* Opcode VEX.0F 0x93 - invalid */
4881/* Opcode VEX.0F 0x94 - invalid */
4882/* Opcode VEX.0F 0x95 - invalid */
4883/* Opcode VEX.0F 0x96 - invalid */
4884/* Opcode VEX.0F 0x97 - invalid */
4885/* Opcode VEX.0F 0x98 - invalid */
4886/* Opcode VEX.0F 0x99 - invalid */
4887/* Opcode VEX.0F 0x9a - invalid */
4888/* Opcode VEX.0F 0x9b - invalid */
4889/* Opcode VEX.0F 0x9c - invalid */
4890/* Opcode VEX.0F 0x9d - invalid */
4891/* Opcode VEX.0F 0x9e - invalid */
4892/* Opcode VEX.0F 0x9f - invalid */
4893/* Opcode VEX.0F 0xa0 - invalid */
4894/* Opcode VEX.0F 0xa1 - invalid */
4895/* Opcode VEX.0F 0xa2 - invalid */
4896/* Opcode VEX.0F 0xa3 - invalid */
4897/* Opcode VEX.0F 0xa4 - invalid */
4898/* Opcode VEX.0F 0xa5 - invalid */
4899/* Opcode VEX.0F 0xa6 - invalid */
4900/* Opcode VEX.0F 0xa7 - invalid */
4901/* Opcode VEX.0F 0xa8 - invalid */
4902/* Opcode VEX.0F 0xa9 - invalid */
4903/* Opcode VEX.0F 0xaa - invalid */
4904/* Opcode VEX.0F 0xab - invalid */
4905/* Opcode VEX.0F 0xac - invalid */
4906/* Opcode VEX.0F 0xad - invalid */
4907
4908
4909/* Opcode VEX.0F 0xae mem/0 - invalid. */
4910/* Opcode VEX.0F 0xae mem/1 - invalid. */
4911
4912/**
4913 * @ opmaps grp15
4914 * @ opcode !11/2
4915 * @ oppfx none
4916 * @ opcpuid sse
4917 * @ opgroup og_sse_mxcsrsm
4918 * @ opxcpttype 5
4919 * @ optest op1=0 -> mxcsr=0
4920 * @ optest op1=0x2083 -> mxcsr=0x2083
4921 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4922 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4923 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4924 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4925 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4926 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4927 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4928 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4929 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4930 */
4931FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4932//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4933//{
4934// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4935// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4936// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4937// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4938// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4939// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4940// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4941// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4942// IEM_MC_END();
4943// return VINF_SUCCESS;
4944//}
4945
4946
4947/**
4948 * @opmaps vexgrp15
4949 * @opcode !11/3
4950 * @oppfx none
4951 * @opcpuid avx
4952 * @opgroup og_avx_mxcsrsm
4953 * @opxcpttype 5
4954 * @optest mxcsr=0 -> op1=0
4955 * @optest mxcsr=0x2083 -> op1=0x2083
4956 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4957 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4958 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4959 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4960 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4961 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4962 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4963 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4964 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4965 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4966 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4967 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4968 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4969 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4970 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4971 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4972 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4973 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4974 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4975 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4976 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4977 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4978 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4979 * -> value.xcpt=0x6
4980 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4981 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4982 * APMv4 rev 3.17 page 509.
4983 * @todo Test this instruction on AMD Ryzen.
4984 */
4985FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4986{
4987 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4988 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4989 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4991 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4992 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4993 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4994 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4995 IEM_MC_END();
4996}
4997
4998/* Opcode VEX.0F 0xae mem/4 - invalid. */
4999/* Opcode VEX.0F 0xae mem/5 - invalid. */
5000/* Opcode VEX.0F 0xae mem/6 - invalid. */
5001/* Opcode VEX.0F 0xae mem/7 - invalid. */
5002
5003/* Opcode VEX.0F 0xae 11b/0 - invalid. */
5004/* Opcode VEX.0F 0xae 11b/1 - invalid. */
5005/* Opcode VEX.0F 0xae 11b/2 - invalid. */
5006/* Opcode VEX.0F 0xae 11b/3 - invalid. */
5007/* Opcode VEX.0F 0xae 11b/4 - invalid. */
5008/* Opcode VEX.0F 0xae 11b/5 - invalid. */
5009/* Opcode VEX.0F 0xae 11b/6 - invalid. */
5010/* Opcode VEX.0F 0xae 11b/7 - invalid. */
5011
5012/**
5013 * Vex group 15 jump table for memory variant.
5014 */
5015IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
5016{ /* pfx: none, 066h, 0f3h, 0f2h */
5017 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5018 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5019 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5020 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5021 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5022 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5023 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5024 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5025};
5026AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
5027
5028
5029/** Opcode VEX.0F 0xae (VEX group 15). */
5030FNIEMOP_DEF(iemOp_VGrp15)
5031{
5032 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5033 if (IEM_IS_MODRM_REG_MODE(bRm))
5034 /* register, register */
5035 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
5036
5037 /* memory, register */
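    /* The table is indexed by the ModRM reg field (the /r digit) times four
       plus the SIMD prefix (none=0, 066h=1, 0f3h=2, 0f2h=3); vstmxcsr above,
       for instance, is /3 with no prefix and thus entry 3*4 + 0 = 12. */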
5038 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5039 + pVCpu->iem.s.idxPrefix], bRm);
5040}
5041
5042
5043/* Opcode VEX.0F 0xaf - invalid. */
5044
5045/* Opcode VEX.0F 0xb0 - invalid. */
5046/* Opcode VEX.0F 0xb1 - invalid. */
5047/* Opcode VEX.0F 0xb2 - invalid. */
5049/* Opcode VEX.0F 0xb3 - invalid. */
5050/* Opcode VEX.0F 0xb4 - invalid. */
5051/* Opcode VEX.0F 0xb5 - invalid. */
5052/* Opcode VEX.0F 0xb6 - invalid. */
5053/* Opcode VEX.0F 0xb7 - invalid. */
5054/* Opcode VEX.0F 0xb8 - invalid. */
5055/* Opcode VEX.0F 0xb9 - invalid. */
5056/* Opcode VEX.0F 0xba - invalid. */
5057/* Opcode VEX.0F 0xbb - invalid. */
5058/* Opcode VEX.0F 0xbc - invalid. */
5059/* Opcode VEX.0F 0xbd - invalid. */
5060/* Opcode VEX.0F 0xbe - invalid. */
5061/* Opcode VEX.0F 0xbf - invalid. */
5062
5063/* Opcode VEX.0F 0xc0 - invalid. */
5064/* Opcode VEX.66.0F 0xc0 - invalid. */
5065/* Opcode VEX.F3.0F 0xc0 - invalid. */
5066/* Opcode VEX.F2.0F 0xc0 - invalid. */
5067
5068/* Opcode VEX.0F 0xc1 - invalid. */
5069/* Opcode VEX.66.0F 0xc1 - invalid. */
5070/* Opcode VEX.F3.0F 0xc1 - invalid. */
5071/* Opcode VEX.F2.0F 0xc1 - invalid. */
5072
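/**
 * Common body for vcmpps/vcmppd (VEX.0F 0xc2 & VEX.66.0F 0xc2).
 *
 * Decodes ModRM plus the trailing imm8 and dispatches to the 128-bit or
 * 256-bit AVX worker depending on VEX.L.  The low five bits of the immediate
 * select one of the 32 AVX compare predicates (0=EQ_OQ, 1=LT_OS, 2=LE_OS,
 * 3=UNORD_Q, 4=NEQ_UQ, 5=NLT_US, 6=NLE_US, 7=ORD_Q, ...).
 */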
5073#define IEMOP_VCMPP_BODY(a_Instr) \
5074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5075 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5076 { \
5077 /* \
5078 * Register, Register. \
5079 */ \
5080 if (pVCpu->iem.s.uVexLength) \
5081 { \
5082 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5083 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5084 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5085 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5086 IEM_MC_PREPARE_AVX_USAGE(); \
5087 IEM_MC_LOCAL(X86YMMREG, uDst); \
5088 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
5089 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
5090 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
5091            IEM_MC_FETCH_YREG_PAIR_YMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm)); \
5092 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5093 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5094 RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
5095 RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
5096 puDst, puSrc, bImmArg); \
5097 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5098 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5099 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5100 IEM_MC_END(); \
5101 } \
5102 else \
5103 { \
5104 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5105 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5106 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5107 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5108 IEM_MC_PREPARE_AVX_USAGE(); \
5109 IEM_MC_LOCAL(X86XMMREG, uDst); \
5110 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
5111 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
5112 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
5113            IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm)); \
5114 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5115 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5116 RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
5117 RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
5118 puDst, puSrc, bImmArg); \
5119 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5120 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5121 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5122 IEM_MC_END(); \
5123 } \
5124 } \
5125 else \
5126 { \
5127 /* \
5128 * Register, Memory. \
5129 */ \
5130 if (pVCpu->iem.s.uVexLength) \
5131 { \
5132 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5135 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5136 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5137 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5138 IEM_MC_PREPARE_AVX_USAGE(); \
5139 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
5140 IEM_MC_LOCAL(X86YMMREG, uDst); \
5141 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
5142 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
5143 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5144            IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5145 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5146 RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
5147 RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
5148 puDst, puSrc, bImmArg); \
5149 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5150 IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5151 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5152 IEM_MC_END(); \
5153 } \
5154 else \
5155 { \
5156 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5159 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5160 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5161 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5162 IEM_MC_PREPARE_AVX_USAGE(); \
5163 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
5164 IEM_MC_LOCAL(X86XMMREG, uDst); \
5165 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
5166 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
5167 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5168            IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5169 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5170 RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
5171 RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
5172 puDst, puSrc, bImmArg); \
5173 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5174 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5175 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5176 IEM_MC_END(); \
5177 } \
5178 } \
5179 (void)0
5180
5181
5182/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
5183FNIEMOP_DEF(iemOp_vcmpps_Vps_Hps_Wps_Ib)
5184{
5185 IEMOP_MNEMONIC4(VEX_RVMI, VCMPPS, vcmpps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5186 IEMOP_VCMPP_BODY(vcmpps);
5187}
5188
5189
5190/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
5191FNIEMOP_DEF(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib)
5192{
5193 IEMOP_MNEMONIC4(VEX_RVMI, VCMPPD, vcmppd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5194 IEMOP_VCMPP_BODY(vcmppd);
5195}
5196
5197
5198/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
5199FNIEMOP_DEF(iemOp_vcmpss_Vss_Hss_Wss_Ib)
5200{
5201    IEMOP_MNEMONIC4(VEX_RVMI, VCMPSS, vcmpss, Vss, Hss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
5202
5203 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5204 if (IEM_IS_MODRM_REG_MODE(bRm))
5205 {
5206 /*
5207 * XMM32, XMM32.
5208 */
5209 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5210 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5211 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5212 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5213 IEM_MC_PREPARE_AVX_USAGE();
5214 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5215 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5216        IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
5217 IEM_MC_LOCAL(X86XMMREG, uDst);
5218 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5219 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5220 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
5221 puDst, puSrc, bImmArg);
5222 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5223 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5224
5225 IEM_MC_ADVANCE_RIP_AND_FINISH();
5226 IEM_MC_END();
5227 }
5228 else
5229 {
5230 /*
5231 * XMM32, [mem32].
5232 */
5233 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5234
5235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5237 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5238 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5239        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5240 IEM_MC_PREPARE_AVX_USAGE();
5241
5242 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5243 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5244        IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu),
5245 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5246 IEM_MC_LOCAL(X86XMMREG, uDst);
5247 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5248 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5249 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
5250 puDst, puSrc, bImmArg);
5251 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5252 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5253
5254 IEM_MC_ADVANCE_RIP_AND_FINISH();
5255 IEM_MC_END();
5256 }
5257}
5258
5259
5260/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
5261FNIEMOP_DEF(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib)
5262{
5263    IEMOP_MNEMONIC4(VEX_RVMI, VCMPSD, vcmpsd, Vsd, Hsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
5264
5265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5266 if (IEM_IS_MODRM_REG_MODE(bRm))
5267 {
5268 /*
5269 * XMM64, XMM64.
5270 */
5271 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5272 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5273 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5274 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5275 IEM_MC_PREPARE_AVX_USAGE();
5276 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5277 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5278        IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
5279 IEM_MC_LOCAL(X86XMMREG, uDst);
5280 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5281 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5282 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
5283 puDst, puSrc, bImmArg);
5284 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5285 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5286
5287 IEM_MC_ADVANCE_RIP_AND_FINISH();
5288 IEM_MC_END();
5289 }
5290 else
5291 {
5292 /*
5293 * XMM64, [mem64].
5294 */
5295 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5296
5297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5299 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5300 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5301        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5302 IEM_MC_PREPARE_AVX_USAGE();
5303
5304 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5305 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5306        IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu),
5307 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5308 IEM_MC_LOCAL(X86XMMREG, uDst);
5309 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5310 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5311 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
5312 puDst, puSrc, bImmArg);
5313 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5314 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5315
5316 IEM_MC_ADVANCE_RIP_AND_FINISH();
5317 IEM_MC_END();
5318 }
5319}
5320
5321
5322/* Opcode VEX.0F 0xc3 - invalid */
5323/* Opcode VEX.66.0F 0xc3 - invalid */
5324/* Opcode VEX.F3.0F 0xc3 - invalid */
5325/* Opcode VEX.F2.0F 0xc3 - invalid */
5326
5327/* Opcode VEX.0F 0xc4 - invalid */
5328
5329
5330/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
5331FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
5332{
5333 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
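    /* This copies the first source operand (VEX.vvvv) to the destination and
       then replaces the word selected by imm8[2:0] with the value read from a
       GPR or a 16-bit memory operand. */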
5334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5335 if (IEM_IS_MODRM_REG_MODE(bRm))
5336 {
5337 /*
5338 * Register, register.
5339 */
5340 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5341 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5342 IEM_MC_LOCAL(RTUINT128U, uSrc1);
5343 IEM_MC_LOCAL(uint16_t, uValue);
5344
5345 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5346 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5347 IEM_MC_PREPARE_AVX_USAGE();
5348
5349 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
5350 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
5351 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
5352 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
5353 IEM_MC_ADVANCE_RIP_AND_FINISH();
5354 IEM_MC_END();
5355 }
5356 else
5357 {
5358 /*
5359 * Register, memory.
5360 */
5361 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5363 IEM_MC_LOCAL(RTUINT128U, uSrc1);
5364 IEM_MC_LOCAL(uint16_t, uValue);
5365
5366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5367 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5368 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5369 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5370 IEM_MC_PREPARE_AVX_USAGE();
5371
5372 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
5373 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5374 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
5375 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
5376 IEM_MC_ADVANCE_RIP_AND_FINISH();
5377 IEM_MC_END();
5378 }
5379}
5380
5381
5382/* Opcode VEX.F3.0F 0xc4 - invalid */
5383/* Opcode VEX.F2.0F 0xc4 - invalid */
5384
5385/* Opcode VEX.0F 0xc5 - invalid */
5386
5387
5388/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
5389FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
5390{
5391 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
5392 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5393 if (IEM_IS_MODRM_REG_MODE(bRm))
5394 {
5395 /*
5396 * greg32, XMM, imm8.
5397 */
5398 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5399 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5400 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5401 IEM_MC_LOCAL(uint16_t, uValue);
5402 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5403 IEM_MC_PREPARE_AVX_USAGE();
5404 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
5405 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
5406 IEM_MC_ADVANCE_RIP_AND_FINISH();
5407 IEM_MC_END();
5408 }
5409 /* No memory operand. */
5410 else
5411 IEMOP_RAISE_INVALID_OPCODE_RET();
5412}
5413
5414
5415/* Opcode VEX.F3.0F 0xc5 - invalid */
5416/* Opcode VEX.F2.0F 0xc5 - invalid */
5417
5418
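/**
 * Common body for vshufps/vshufpd (VEX.0F 0xc6 & VEX.66.0F 0xc6).
 *
 * In contrast to the vcmp* body above this calls a plain void assembly
 * helper, as the shuffles cannot raise SIMD FP exceptions; the imm8 supplies
 * the shuffle control field.
 */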
5419#define VSHUFP_X(a_Instr) \
5420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5421 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5422 { \
5423 /* \
5424 * Register, register. \
5425 */ \
5426 if (pVCpu->iem.s.uVexLength) \
5427 { \
5428 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5429 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5430 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
5431 IEM_MC_LOCAL(RTUINT256U, uDst); \
5432 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
5433 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
5434 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
5435 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
5436 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
5437 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5438 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5439 IEM_MC_PREPARE_AVX_USAGE(); \
5440 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5441 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5442 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
5443 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5444 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5445 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5446 IEM_MC_END(); \
5447 } \
5448 else \
5449 { \
5450 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5451 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5452 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5453 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
5454 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
5455 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
5456 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5457 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5458 IEM_MC_PREPARE_AVX_USAGE(); \
5459 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5460 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5461 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5462 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
5463 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5464 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
5465 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5466 IEM_MC_END(); \
5467 } \
5468 } \
5469 else \
5470 { \
5471 /* \
5472 * Register, memory. \
5473 */ \
5474 if (pVCpu->iem.s.uVexLength) \
5475 { \
5476 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5477 IEM_MC_LOCAL(RTUINT256U, uDst); \
5478 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
5479 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
5480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5481 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
5482 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
5483 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
5484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5485 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5486 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5487 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
5488 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5489 IEM_MC_PREPARE_AVX_USAGE(); \
5490 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5491 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5492 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
5493 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5494 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5495 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5496 IEM_MC_END(); \
5497 } \
5498 else \
5499 { \
5500 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5501 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
5502 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5503 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
5504 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
5505 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
5506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5507 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5508 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5509 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5510 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5511 IEM_MC_PREPARE_AVX_USAGE(); \
5512 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5513 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5514 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5515 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
5516 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5517 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
5518 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5519 IEM_MC_END(); \
5520 } \
5521 } \
5522 (void)0
5523
5524/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
5525FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
5526{
5527    IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
5528 VSHUFP_X(vshufps);
5529}
5530
5531
5532/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
5533FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
5534{
5535 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
5536 VSHUFP_X(vshufpd);
5537}
5538#undef VSHUFP_X
5539
5540
5541/* Opcode VEX.F3.0F 0xc6 - invalid */
5542/* Opcode VEX.F2.0F 0xc6 - invalid */
5543
5544/* Opcode VEX.0F 0xc7 - invalid */
5545/* Opcode VEX.66.0F 0xc7 - invalid */
5546/* Opcode VEX.F3.0F 0xc7 - invalid */
5547/* Opcode VEX.F2.0F 0xc7 - invalid */
5548
5549/* Opcode VEX.0F 0xc8 - invalid */
5550/* Opcode VEX.0F 0xc9 - invalid */
5551/* Opcode VEX.0F 0xca - invalid */
5552/* Opcode VEX.0F 0xcb - invalid */
5553/* Opcode VEX.0F 0xcc - invalid */
5554/* Opcode VEX.0F 0xcd - invalid */
5555/* Opcode VEX.0F 0xce - invalid */
5556/* Opcode VEX.0F 0xcf - invalid */
5557
5558
5559/* Opcode VEX.0F 0xd0 - invalid */
5560
5561
5562/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
5563FNIEMOP_DEF(iemOp_vaddsubpd_Vpd_Hpd_Wpd)
5564{
5565 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPD, vaddsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5566 IEMOPMEDIAF3_INIT_VARS( vaddsubpd);
5567 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5568}
5569
5570
5571/* Opcode VEX.F3.0F 0xd0 - invalid */
5572
5573
5574/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
5575FNIEMOP_DEF(iemOp_vaddsubps_Vps_Hps_Wps)
5576{
5577 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPS, vaddsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5578 IEMOPMEDIAF3_INIT_VARS( vaddsubps);
5579 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5580}
5581
5582
5583/* Opcode VEX.0F 0xd1 - invalid */
5584
5585
5586/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
5587FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
5588{
5589 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5590 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
5591 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5592}
5593
5594/* Opcode VEX.F3.0F 0xd1 - invalid */
5595/* Opcode VEX.F2.0F 0xd1 - invalid */
5596
5597/* Opcode VEX.0F 0xd2 - invalid */
5598/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
5599FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
5600{
5601 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5602 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
5603 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5604}
5605
5606/* Opcode VEX.F3.0F 0xd2 - invalid */
5607/* Opcode VEX.F2.0F 0xd2 - invalid */
5608
5609/* Opcode VEX.0F 0xd3 - invalid */
5610/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
5611FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
5612{
5613 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5614 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
5615 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5616}
5617
5618/* Opcode VEX.F3.0F 0xd3 - invalid */
5619/* Opcode VEX.F2.0F 0xd3 - invalid */
5620
5621/* Opcode VEX.0F 0xd4 - invalid */
5622
5623
5624/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
5625FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
5626{
5627 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5628 IEMOPMEDIAOPTF3_INIT_VARS( vpaddq);
5629 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5630}
5631
5632
5633/* Opcode VEX.F3.0F 0xd4 - invalid */
5634/* Opcode VEX.F2.0F 0xd4 - invalid */
5635
5636/* Opcode VEX.0F 0xd5 - invalid */
5637
5638
5639/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
5640FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
5641{
5642 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5643 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
5644 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5645}
5646
5647
5648/* Opcode VEX.F3.0F 0xd5 - invalid */
5649/* Opcode VEX.F2.0F 0xd5 - invalid */
5650
5651/* Opcode VEX.0F 0xd6 - invalid */
5652
5653/**
5654 * @opcode 0xd6
5655 * @oppfx 0x66
5656 * @opcpuid avx
5657 * @opgroup og_avx_pcksclr_datamove
5658 * @opxcpttype none
5659 * @optest op1=-1 op2=2 -> op1=2
5660 * @optest op1=0 op2=-42 -> op1=-42
5661 */
5662FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
5663{
5664 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5665 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5666 if (IEM_IS_MODRM_REG_MODE(bRm))
5667 {
5668 /*
5669 * Register, register.
5670 */
5671 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5672 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5673
5674 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5675 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5676
5677 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5678 IEM_GET_MODRM_REG(pVCpu, bRm));
5679 IEM_MC_ADVANCE_RIP_AND_FINISH();
5680 IEM_MC_END();
5681 }
5682 else
5683 {
5684 /*
5685 * Memory, register.
5686 */
5687 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5688 IEM_MC_LOCAL(uint64_t, uSrc);
5689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5690
5691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5692 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5693 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5694 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5695
5696 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5697 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5698
5699 IEM_MC_ADVANCE_RIP_AND_FINISH();
5700 IEM_MC_END();
5701 }
5702}
5703
5704/* Opcode VEX.F3.0F 0xd6 - invalid */
5705/* Opcode VEX.F2.0F 0xd6 - invalid */
5706
5707
5708/* Opcode VEX.0F 0xd7 - invalid */
5709
5710/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
5711FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
5712{
5713 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5714    /* Docs say register only. */
5715 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
5716 {
5717        /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
5718 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
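        /* The MSB of each byte in the source register is gathered into the low
           16 (VEX.128) or 32 (VEX.256) bits of the destination GPR; all higher
           result bits end up zero. */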
5719 if (pVCpu->iem.s.uVexLength)
5720 {
5721 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5722 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
5723 IEM_MC_ARG(uint64_t *, puDst, 0);
5724 IEM_MC_LOCAL(RTUINT256U, uSrc);
5725 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
5726 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5727 IEM_MC_PREPARE_AVX_USAGE();
5728 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5729 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5730 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
5731 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
5732 IEM_MC_ADVANCE_RIP_AND_FINISH();
5733 IEM_MC_END();
5734 }
5735 else
5736 {
5737 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5738 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5739 IEM_MC_ARG(uint64_t *, puDst, 0);
5740 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5741 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5742 IEM_MC_PREPARE_AVX_USAGE();
5743 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5744 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5745 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
5746 IEM_MC_ADVANCE_RIP_AND_FINISH();
5747 IEM_MC_END();
5748 }
5749 }
5750 else
5751 IEMOP_RAISE_INVALID_OPCODE_RET();
5752}
5753
5754
5755/* Opcode VEX.F3.0F 0xd7 - invalid */
5756/* Opcode VEX.F2.0F 0xd7 - invalid */
5757
5758
5759/* Opcode VEX.0F 0xd8 - invalid */
5760
5761/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
5762FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
5763{
5764 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5765 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
5766 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5767}
5768
5769
5770/* Opcode VEX.F3.0F 0xd8 - invalid */
5771/* Opcode VEX.F2.0F 0xd8 - invalid */
5772
5773/* Opcode VEX.0F 0xd9 - invalid */
5774
5775
5776/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
5777FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
5778{
5779 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5780 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
5781 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5782}
5783
5784
5785/* Opcode VEX.F3.0F 0xd9 - invalid */
5786/* Opcode VEX.F2.0F 0xd9 - invalid */
5787
5788/* Opcode VEX.0F 0xda - invalid */
5789
5790
5791/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
5792FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
5793{
5794 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5795 IEMOPMEDIAOPTF3_INIT_VARS(vpminub);
5796 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5797}
5798
5799
5800/* Opcode VEX.F3.0F 0xda - invalid */
5801/* Opcode VEX.F2.0F 0xda - invalid */
5802
5803/* Opcode VEX.0F 0xdb - invalid */
5804
5805
5806/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
5807FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
5808{
5809 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5810 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5811 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
5812}
5813
5814
5815/* Opcode VEX.F3.0F 0xdb - invalid */
5816/* Opcode VEX.F2.0F 0xdb - invalid */
5817
5818/* Opcode VEX.0F 0xdc - invalid */
5819
5820
5821/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
5822FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
5823{
5824 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5825 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
5826 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5827}
5828
5829
5830/* Opcode VEX.F3.0F 0xdc - invalid */
5831/* Opcode VEX.F2.0F 0xdc - invalid */
5832
5833/* Opcode VEX.0F 0xdd - invalid */
5834
5835
5836/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
5837FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
5838{
5839 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5840 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
5841 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5842}
5843
5844
5845/* Opcode VEX.F3.0F 0xdd - invalid */
5846/* Opcode VEX.F2.0F 0xdd - invalid */
5847
5848/* Opcode VEX.0F 0xde - invalid */
5849
5850
5851/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
5852FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
5853{
5854 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5855 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxub);
5856 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5857}
5858
5859
5860/* Opcode VEX.F3.0F 0xde - invalid */
5861/* Opcode VEX.F2.0F 0xde - invalid */
5862
5863/* Opcode VEX.0F 0xdf - invalid */
5864
5865
5866/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
5867FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
5868{
5869 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5870 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5871 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
5872}
5873
5874
5875/* Opcode VEX.F3.0F 0xdf - invalid */
5876/* Opcode VEX.F2.0F 0xdf - invalid */
5877
5878/* Opcode VEX.0F 0xe0 - invalid */
5879
5880
5881/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
5882FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
5883{
5884 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5885 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
5886 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5887}
5888
5889
5890/* Opcode VEX.F3.0F 0xe0 - invalid */
5891/* Opcode VEX.F2.0F 0xe0 - invalid */
5892
5893/* Opcode VEX.0F 0xe1 - invalid */
5894/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
5895FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
5896{
5897 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5898 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
5899 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5900}
5901
5902/* Opcode VEX.F3.0F 0xe1 - invalid */
5903/* Opcode VEX.F2.0F 0xe1 - invalid */
5904
5905/* Opcode VEX.0F 0xe2 - invalid */
5906/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
5907FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
5908{
5909 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5910 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
5911 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5912}
5913
5914/* Opcode VEX.F3.0F 0xe2 - invalid */
5915/* Opcode VEX.F2.0F 0xe2 - invalid */
5916
5917/* Opcode VEX.0F 0xe3 - invalid */
5918
5919
5920/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
5921FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
5922{
5923 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5924 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
5925 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5926}
5927
5928
5929/* Opcode VEX.F3.0F 0xe3 - invalid */
5930/* Opcode VEX.F2.0F 0xe3 - invalid */
5931
5932/* Opcode VEX.0F 0xe4 - invalid */
5933
5934
5935/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
5936FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
5937{
5938 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5939 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
5940 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5941}
5942
5943
5944/* Opcode VEX.F3.0F 0xe4 - invalid */
5945/* Opcode VEX.F2.0F 0xe4 - invalid */
5946
5947/* Opcode VEX.0F 0xe5 - invalid */
5948
5949
5950/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
5951FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
5952{
5953 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5954 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
5955 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5956}
5957
5958
5959/* Opcode VEX.F3.0F 0xe5 - invalid */
5960/* Opcode VEX.F2.0F 0xe5 - invalid */
5961
5962/* Opcode VEX.0F 0xe6 - invalid */
5963/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
5964FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
5965/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
5966FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
5967/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
5968FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
5969
5970
5971/* Opcode VEX.0F 0xe7 - invalid */
5972
5973/**
5974 * @opcode 0xe7
5975 * @opcodesub !11 mr/reg
5976 * @oppfx 0x66
5977 * @opcpuid avx
5978 * @opgroup og_avx_cachect
5979 * @opxcpttype 1
5980 * @optest op1=-1 op2=2 -> op1=2
5981 * @optest op1=0 op2=-42 -> op1=-42
5982 */
5983FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
5984{
5985 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5986 Assert(pVCpu->iem.s.uVexLength <= 1);
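    /* The non-temporal hint has no architecturally visible effect that needs
       modelling here, so the store is simply done with the regular 16/32 byte
       alignment checks for the 128/256-bit forms. */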
5987 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5988 if (IEM_IS_MODRM_MEM_MODE(bRm))
5989 {
5990 if (pVCpu->iem.s.uVexLength == 0)
5991 {
5992 /*
5993 * 128-bit: Memory, register.
5994 */
5995 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5996 IEM_MC_LOCAL(RTUINT128U, uSrc);
5997 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5998
5999 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6000 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6001 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6002 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6003
6004 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
6005 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6006
6007 IEM_MC_ADVANCE_RIP_AND_FINISH();
6008 IEM_MC_END();
6009 }
6010 else
6011 {
6012 /*
6013 * 256-bit: Memory, register.
6014 */
6015 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6016 IEM_MC_LOCAL(RTUINT256U, uSrc);
6017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6018
6019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6020 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6021 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6022 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6023
6024 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
6025 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6026
6027 IEM_MC_ADVANCE_RIP_AND_FINISH();
6028 IEM_MC_END();
6029 }
6030 }
6031 /**
6032 * @opdone
6033 * @opmnemonic udvex660fe7reg
6034 * @opcode 0xe7
6035 * @opcodesub 11 mr/reg
6036 * @oppfx 0x66
6037 * @opunused immediate
6038 * @opcpuid avx
6039 * @optest ->
6040 */
6041 else
6042 IEMOP_RAISE_INVALID_OPCODE_RET();
6043}
6044
6045/* Opcode VEX.F3.0F 0xe7 - invalid */
6046/* Opcode VEX.F2.0F 0xe7 - invalid */
6047
6048
6049/* Opcode VEX.0F 0xe8 - invalid */
6050
6051
6052/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
6053FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
6054{
6055 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6056 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
6057 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6058}
6059
6060
6061/* Opcode VEX.F3.0F 0xe8 - invalid */
6062/* Opcode VEX.F2.0F 0xe8 - invalid */
6063
6064/* Opcode VEX.0F 0xe9 - invalid */
6065
6066
6067/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
6068FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
6069{
6070 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6071 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
6072 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6073}
6074
6075
6076/* Opcode VEX.F3.0F 0xe9 - invalid */
6077/* Opcode VEX.F2.0F 0xe9 - invalid */
6078
6079/* Opcode VEX.0F 0xea - invalid */
6080
6081
6082/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
6083FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
6084{
6085 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6086 IEMOPMEDIAOPTF3_INIT_VARS(vpminsw);
6087 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6088}
6089
6090
6091/* Opcode VEX.F3.0F 0xea - invalid */
6092/* Opcode VEX.F2.0F 0xea - invalid */
6093
6094/* Opcode VEX.0F 0xeb - invalid */
6095
6096
6097/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
6098FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
6099{
6100 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6101 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6102 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
6103}
6104
6105
6106
6107/* Opcode VEX.F3.0F 0xeb - invalid */
6108/* Opcode VEX.F2.0F 0xeb - invalid */
6109
6110/* Opcode VEX.0F 0xec - invalid */
6111
6112
6113/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
6114FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
6115{
6116 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6117 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
6118 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6119}
6120
6121
6122/* Opcode VEX.F3.0F 0xec - invalid */
6123/* Opcode VEX.F2.0F 0xec - invalid */
6124
6125/* Opcode VEX.0F 0xed - invalid */
6126
6127
6128/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
6129FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
6130{
6131 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6132 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
6133 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6134}
6135
6136
6137/* Opcode VEX.F3.0F 0xed - invalid */
6138/* Opcode VEX.F2.0F 0xed - invalid */
6139
6140/* Opcode VEX.0F 0xee - invalid */
6141
6142
6143/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
6144FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
6145{
6146 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6147 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsw);
6148 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6149}
6150
6151
6152/* Opcode VEX.F3.0F 0xee - invalid */
6153/* Opcode VEX.F2.0F 0xee - invalid */
6154
6155
6156/* Opcode VEX.0F 0xef - invalid */
6157
6158
6159/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
6160FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
6161{
6162 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6163 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6164 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
6165}
6166
6167
6168/* Opcode VEX.F3.0F 0xef - invalid */
6169/* Opcode VEX.F2.0F 0xef - invalid */
6170
6171/* Opcode VEX.0F 0xf0 - invalid */
6172/* Opcode VEX.66.0F 0xf0 - invalid */
6173
6174
6175/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
6176FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
6177{
6178 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
6179 Assert(pVCpu->iem.s.uVexLength <= 1);
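    /* vlddqu is an unaligned load (introduced with SSE3 as a cache-line-split
       load hint), hence the _NO_AC fetches below which skip the alignment
       check. */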
6180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6181 if (IEM_IS_MODRM_REG_MODE(bRm))
6182 {
6183 /*
6184 * Register, register - (not implemented, assuming it raises \#UD).
6185 */
6186 IEMOP_RAISE_INVALID_OPCODE_RET();
6187 }
6188 else if (pVCpu->iem.s.uVexLength == 0)
6189 {
6190 /*
6191 * Register, memory128.
6192 */
6193 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6194 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6195 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6196
6197 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6198 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6199 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6200 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6201
6202 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6203 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6204
6205 IEM_MC_ADVANCE_RIP_AND_FINISH();
6206 IEM_MC_END();
6207 }
6208 else
6209 {
6210 /*
6211 * Register, memory256.
6212 */
6213 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6214 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
6215 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6216
6217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6218 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6219 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6220 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6221
6222 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6223 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
6224
6225 IEM_MC_ADVANCE_RIP_AND_FINISH();
6226 IEM_MC_END();
6227 }
6228}
6229
6230
6231/* Opcode VEX.0F 0xf1 - invalid */
6232/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
6233FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
6234{
6235 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6236 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
6237 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6238}
6239
6240/* Opcode VEX.F2.0F 0xf1 - invalid */
6241
6242/* Opcode VEX.0F 0xf2 - invalid */
6243/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
6244FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
6245{
6246 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6247 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
6248 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6249}
6250/* Opcode VEX.F2.0F 0xf2 - invalid */
6251
6252/* Opcode VEX.0F 0xf3 - invalid */
6253/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
6254FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
6255{
6256 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6257 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
6258 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6259}
6260/* Opcode VEX.F2.0F 0xf3 - invalid */
6261
6262/* Opcode VEX.0F 0xf4 - invalid */
6263
6264
6265/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
6266FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
6267{
6268 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6269 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
6270 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6271}
6272
6273
6274/* Opcode VEX.F2.0F 0xf4 - invalid */
6275
6276/* Opcode VEX.0F 0xf5 - invalid */
6277
6278
6279/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
6280FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
6281{
6282 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6283 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
6284 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6285}
6286
6287
6288/* Opcode VEX.F2.0F 0xf5 - invalid */
6289
6290/* Opcode VEX.0F 0xf6 - invalid */
6291
6292
6293/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
6294FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
6295{
6296 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6297 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
6298 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6299}
6300
6301
6302/* Opcode VEX.F2.0F 0xf6 - invalid */
6303
6304/* Opcode VEX.0F 0xf7 - invalid */
6305
6306
6307/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
6308FNIEMOP_DEF(iemOp_vmaskmovdqu_Vdq_Udq)
6309{
6310// IEMOP_MNEMONIC2(RM, VMASKMOVDQU, vmaskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
6311 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6312 if (IEM_IS_MODRM_REG_MODE(bRm))
6313 {
6314 /*
6315 * XMM, XMM, (implicit) [ ER]DI
6316 */
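        /* Only the bytes whose corresponding mask register byte has the MSB
           set are written to [rdi]/[edi]; this is emulated by fetching the 16
           bytes, merging in the helper and storing the result back. */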
6317 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6318 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
6319 IEM_MC_LOCAL( uint64_t, u64EffAddr);
6320 IEM_MC_LOCAL( RTUINT128U, u128Mem);
6321 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
6322 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
6323 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
6324 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6325 IEM_MC_PREPARE_AVX_USAGE();
6326
6327 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
6328 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
6329 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
6330 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
6331 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
6332 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
6333
6334 IEM_MC_ADVANCE_RIP_AND_FINISH();
6335 IEM_MC_END();
6336 }
6337 else
6338 {
6339 /* The memory, register encoding is invalid. */
6340 IEMOP_RAISE_INVALID_OPCODE_RET();
6341 }
6342}
6343
6344
6345/* Opcode VEX.F2.0F 0xf7 - invalid */
6346
6347/* Opcode VEX.0F 0xf8 - invalid */
6348
6349
6350/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
6351FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
6352{
6353 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6354 IEMOPMEDIAOPTF3_INIT_VARS( vpsubb);
6355 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6356}
6357
6358
6359/* Opcode VEX.F2.0F 0xf8 - invalid */
6360
6361/* Opcode VEX.0F 0xf9 - invalid */
6362
6363
6364/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
6365FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
6366{
6367 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6368 IEMOPMEDIAOPTF3_INIT_VARS( vpsubw);
6369 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6370}
6371
6372
6373/* Opcode VEX.F2.0F 0xf9 - invalid */
6374
6375/* Opcode VEX.0F 0xfa - invalid */
6376
6377
6378/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
6379FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
6380{
6381 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6382 IEMOPMEDIAOPTF3_INIT_VARS( vpsubd);
6383 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6384}
6385
6386
6387/* Opcode VEX.F2.0F 0xfa - invalid */
6388
6389/* Opcode VEX.0F 0xfb - invalid */
6390
6391
6392/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
6393FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
6394{
6395 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6396 IEMOPMEDIAOPTF3_INIT_VARS( vpsubq);
6397 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6398}
6399
6400
6401/* Opcode VEX.F2.0F 0xfb - invalid */
6402
6403/* Opcode VEX.0F 0xfc - invalid */
6404
6405
6406/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
6407FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
6408{
6409 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6410 IEMOPMEDIAOPTF3_INIT_VARS( vpaddb);
6411 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6412}
6413
6414
6415/* Opcode VEX.F2.0F 0xfc - invalid */
6416
6417/* Opcode VEX.0F 0xfd - invalid */
6418
6419
6420/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
6421FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
6422{
6423 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6424 IEMOPMEDIAOPTF3_INIT_VARS( vpaddw);
6425 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6426}
6427
6428
6429/* Opcode VEX.F2.0F 0xfd - invalid */
6430
6431/* Opcode VEX.0F 0xfe - invalid */
6432
6433
6434/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
6435FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
6436{
6437 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6438 IEMOPMEDIAOPTF3_INIT_VARS( vpaddd);
6439 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6440}
6441
6442
6443/* Opcode VEX.F2.0F 0xfe - invalid */
6444
6445
6446/** Opcode **** 0x0f 0xff - UD0 */
6447FNIEMOP_DEF(iemOp_vud0)
6448{
6449/** @todo testcase: vud0 */
6450 IEMOP_MNEMONIC(vud0, "vud0");
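    /* Intel CPUs consume a ModRM byte (and any effective address bytes) for
       UD0 before raising \#UD, whereas AMD CPUs fault on the opcode alone;
       hence the vendor check below. */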
6451 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
6452 {
6453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
6454 if (IEM_IS_MODRM_MEM_MODE(bRm))
6455 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
6456 }
6457 IEMOP_HLP_DONE_DECODING();
6458 IEMOP_RAISE_INVALID_OPCODE_RET();
6459}
6460
6461
6462
6463/**
6464 * VEX opcode map \#1.
6465 *
6466 * @sa g_apfnTwoByteMap
6467 */
6468const PFNIEMOP g_apfnVexMap1[] =
6469{
6470    /*          no prefix,                  066h prefix,                f3h prefix,                 f2h prefix */
6471 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
6472 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
6473 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
6474 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
6475 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
6476 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
6477 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
6478 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
6479 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
6480 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
6481 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
6482 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
6483 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
6484 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
6485 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
6486 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
6487
6488 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
6489 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
6490 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
6491 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6492 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6493 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6494 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
6495 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6496 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
6497 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
6498 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
6499 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
6500 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
6501 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
6502 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
6503 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
6504
    /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */

    /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
    /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_VGrp15),
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
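
/*
 * A minimal dispatch sketch (hedged; the real decoder lives outside this file
 * and the state names used here are an assumption): each opcode byte owns
 * four consecutive table entries, one per SIMD prefix column (none, 0x66,
 * 0xf3, 0xf2), so a lookup multiplies the opcode by four and adds the prefix
 * index:
 *
 *      uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
 *      return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
 *
 * The AssertCompile above pins that layout: 256 opcodes times 4 columns = 1024.
 */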
/** @} */