VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h @ 105220

Last change on this file since 105220 was 105220, checked in by vboxsync, 9 months ago:

VMM/IEM: Implement vmulpd instruction emulation, bugref:9898
/* $Id: IEMAllInstVexMap1.cpp.h 105220 2024-07-09 09:14:17Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 1
 * @{
 */

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128
 *     - vpxxx    ymm0, ymm1, ymm2/mem256
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86YMMREG, uSrc1);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_LOCAL(X86YMMREG, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_YREG_YMM(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
            IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86YMMREG, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_LOCAL(X86YMMREG, uSrc1);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86XMMREG, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));

            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
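
/* Dispatch sketch (illustrative, not from the original file): an opcode
 * handler would hand this worker an IEMOPMEDIAF3 function table, mirroring
 * the IEMOPMEDIAOPTF3_INIT_VARS / IEM_SELECT_HOST_OR_FALLBACK pattern used
 * by vunpcklps further down; the table-initializer name IEMOPMEDIAF3_INIT_VARS
 * is an assumption here:
 *     IEMOPMEDIAF3_INIT_VARS(vmulpd);
 *     return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
 *                           IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
 */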


/**
 * Common worker for scalar AVX/AVX2 instructions of the form (vaddss, vsubss, etc.):
 *     - vxxxss   xmm0, xmm1, xmm2/mem32
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation.
 * Ignores VEX.L; from the SDM:
 *     Software should ensure VADDSS is encoded with VEX.L=0.
 *     Encoding VADDSS with VEX.L=1 may encounter unpredictable behavior
 *     across different processor generations.
 */
FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R32, PFNIEMAIMPLFPAVXF3U128R32, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_ARG(PCRTFLOAT32U, pr32Src2, 2);
        IEM_MC_REF_XREG_R32_CONST(pr32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
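
/* Net effect of the scalar worker above, stated informally (assuming pfnU128
 * implements the usual VEX scalar semantics, e.g. vaddss):
 *     uDst.au32[0]    = op(puSrc1->au32[0], *pr32Src2)  - the scalar result;
 *     uDst.au32[1..3] = puSrc1->au32[1..3]              - copied from the VVVV register;
 *     bits 255:128 of the destination YMM register are zeroed.
 * The 64-bit worker below (vaddsd and friends) is analogous with qwords. */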


/**
 * Common worker for scalar AVX/AVX2 instructions of the form (vaddsd, vsubsd, etc.):
 *     - vxxxsd   xmm0, xmm1, xmm2/mem64
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation.
 * Ignores VEX.L; from the SDM:
 *     Software should ensure VADDSD is encoded with VEX.L=0.
 *     Encoding VADDSD with VEX.L=1 may encounter unpredictable behavior
 *     across different processor generations.
 */
FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R64, PFNIEMAIMPLFPAVXF3U128R64, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_ARG(PCRTFLOAT64U, pr64Src2, 2);
        IEM_MC_REF_XREG_R64_CONST(pr64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128
 *     - vpxxx    ymm0, ymm1, ymm2/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
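
/* Callers pair this worker with an IEMOPMEDIAOPTF3 function table and select
 * the host or fallback implementation at dispatch time; see
 * iemOp_vunpcklps_Vx_Hx_Wx further down for the concrete
 * IEMOPMEDIAOPTF3_INIT_VARS / IEM_SELECT_HOST_OR_FALLBACK pattern. */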


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpckhxx xmm0, xmm1, xmm2/mem128
 *     - vpunpckhxx ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * lower 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpcklxx xmm0, xmm1, xmm2/mem128
 *     - vpunpcklxx ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * higher 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}
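
/* Both wrappers above currently forward to the generic worker unchanged; the
 * split only reserves room for a future implementation that skips the unused
 * 64-bit memory fetch described in the doc comments. */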


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1/mem128
 *     - vpxxx    ymm0, ymm1/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
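
/* Dispatch sketch (illustrative; the two-operand table-initializer name
 * IEMOPMEDIAOPTF2_INIT_VARS is an assumption, mirroring the three-operand
 * pattern of vunpcklps below):
 *     IEMOPMEDIAOPTF2_INIT_VARS(vpxxx);
 *     return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt,
 *                           IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 */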


/* Opcode VEX.0F 0x00 - invalid */
/* Opcode VEX.0F 0x01 - invalid */
/* Opcode VEX.0F 0x02 - invalid */
/* Opcode VEX.0F 0x03 - invalid */
/* Opcode VEX.0F 0x04 - invalid */
/* Opcode VEX.0F 0x05 - invalid */
/* Opcode VEX.0F 0x06 - invalid */
/* Opcode VEX.0F 0x07 - invalid */
/* Opcode VEX.0F 0x08 - invalid */
/* Opcode VEX.0F 0x09 - invalid */
/* Opcode VEX.0F 0x0a - invalid */

/** Opcode VEX.0F 0x0b. */
FNIEMOP_DEF(iemOp_vud2)
{
    IEMOP_MNEMONIC(vud2, "vud2");
    IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode VEX.0F 0x0c - invalid */
/* Opcode VEX.0F 0x0d - invalid */
/* Opcode VEX.0F 0x0e - invalid */
/* Opcode VEX.0F 0x0f - invalid */


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
         * @note HssHi refers to bits 127:32.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Vss_Hss_Wss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
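
/* Summary: the register form of vmovss is a three-operand merge - the low
 * dword comes from the r/m register, bits 127:32 from the VVVV register -
 * while the memory form is a plain 32-bit load zero-extended through VLMAX,
 * which is why it decodes with no VVVV operand. */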


FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Wss_Hss_Vss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         * @optest op2=0x2200220122022203
         *         op3=0x3304330533063307
         *         -> op1=0x22002201220222033304330533063307
         * @optest op2=-1 op3=-42 -> op1=-42
         * @note op3 and op2 are only the 8-byte high XMM register halves.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         * @optest op1=1 op2=0 op3=0 -> op1=0
         * @optest op1=0 op2=-1 op3=-1 -> op1=-1
         * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
         * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 * @optest op2=0 op3=2 -> op1=2
 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
 *         -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
 *         -> op1=0x00000002000000020000000100000001
 * @optest vex.l==1 /
 *         op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
 *         -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
 *         -> op1=0x22222222111111112222222211111111
 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
 *         -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQword*/);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(uint64_t, uSrc1);
            IEM_MC_LOCAL(uint64_t, uSrc2);
            IEM_MC_FETCH_YREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQword*/);
            IEM_MC_FETCH_YREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 2 /*a_iQword*/);

            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, uSrc1);
            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc1);
            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /*a_iQword*/, uSrc2);
            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /*a_iQword*/, uSrc2);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode VEX.F3.0F 0x13 - invalid */
/* Opcode VEX.F2.0F 0x13 - invalid */

/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpcklps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpcklpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x14 - invalid */
/* Opcode VEX.F2.0F 0x14 - invalid */


/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpckhps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpckhpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x15 - invalid */
/* Opcode VEX.F2.0F 0x15 - invalid */


FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1649
1650
1651/**
1652 * @opcode 0x16
1653 * @opcodesub !11 mr/reg
1654 * @oppfx 0x66
1655 * @opcpuid avx
1656 * @opgroup og_avx_pcksclr_datamerge
1657 * @opxcpttype 5LZ
1658 */
1659FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1660{
1661 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1662 if (IEM_IS_MODRM_MEM_MODE(bRm))
1663 {
1664 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1665
1666 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1667 IEM_MC_LOCAL(uint64_t, uSrc);
1668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1669
1670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1671 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1672 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1673 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1674
1675 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1676 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1677 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1678 uSrc);
1679
1680 IEM_MC_ADVANCE_RIP_AND_FINISH();
1681 IEM_MC_END();
1682 }
1683
1684 /**
1685 * @opdone
1686 * @opmnemonic udvex660f16m3
1687 * @opcode 0x16
1688 * @opcodesub 11 mr/reg
1689 * @oppfx 0x66
1690 * @opunused immediate
1691 * @opcpuid avx
1692 * @optest ->
1693 */
1694 else
1695 IEMOP_RAISE_INVALID_OPCODE_RET();
1696}
1697
1698
1699/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1700/**
1701 * @opcode 0x16
1702 * @oppfx 0xf3
1703 * @opcpuid avx
1704 * @opgroup og_avx_pcksclr_datamove
1705 * @opxcpttype 4
1706 */
1707FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1708{
1709 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1710 Assert(pVCpu->iem.s.uVexLength <= 1);
1711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1712 if (IEM_IS_MODRM_REG_MODE(bRm))
1713 {
1714 /*
1715 * Register, register.
1716 */
1717 if (pVCpu->iem.s.uVexLength == 0)
1718 {
1719 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1720 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1721 IEM_MC_LOCAL(RTUINT128U, uSrc);
1722
1723 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1724 IEM_MC_PREPARE_AVX_USAGE();
1725
1726 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1727 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1728 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1729 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1730 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1731 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1732
1733 IEM_MC_ADVANCE_RIP_AND_FINISH();
1734 IEM_MC_END();
1735 }
1736 else
1737 {
1738 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1739 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1740 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1741 IEM_MC_PREPARE_AVX_USAGE();
1742
1743 IEM_MC_LOCAL(RTUINT256U, uSrc);
1744 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1745 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1746 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1747 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1748 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1749 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1750 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1751 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1752 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1753 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1754
1755 IEM_MC_ADVANCE_RIP_AND_FINISH();
1756 IEM_MC_END();
1757 }
1758 }
1759 else
1760 {
1761 /*
1762 * Register, memory.
1763 */
1764 if (pVCpu->iem.s.uVexLength == 0)
1765 {
1766 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1767 IEM_MC_LOCAL(RTUINT128U, uSrc);
1768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1769
1770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1771 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1772 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1773 IEM_MC_PREPARE_AVX_USAGE();
1774
1775 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1776 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1777 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1778 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1779 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1780 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1781
1782 IEM_MC_ADVANCE_RIP_AND_FINISH();
1783 IEM_MC_END();
1784 }
1785 else
1786 {
1787 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1790 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1791 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1792 IEM_MC_PREPARE_AVX_USAGE();
1793
1794 IEM_MC_LOCAL(RTUINT256U, uSrc);
1795 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1796
1797 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1798 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1799 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1800 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1801 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1802 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1803 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1804 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1805 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1806
1807 IEM_MC_ADVANCE_RIP_AND_FINISH();
1808 IEM_MC_END();
1809 }
1810 }
1811}
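
// The STORE statements above implement the vmovshdup shuffle: every even destination
// dword and the odd dword above it both receive the source's odd dword. A hedged C
// sketch of that index pattern (names invented here):
//
//     #include <stdint.h>
//
//     static void sketchMovShDup(uint32_t *pauDst, const uint32_t *pauSrc, unsigned cDWords /* 4 or 8 */)
//     {
//         for (unsigned i = 0; i < cDWords; i += 2)
//             pauDst[i] = pauDst[i + 1] = pauSrc[i + 1];
//     }
//
// which matches the (0<-1, 1<-1, 2<-3, 3<-3, ...) stores in both length variants.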
1812
1813
1814/* Opcode VEX.F2.0F 0x16 - invalid */
1815
1816
1817/**
1818 * @opcode 0x17
1819 * @opcodesub !11 mr/reg
1820 * @oppfx none
1821 * @opcpuid avx
1822 * @opgroup og_avx_simdfp_datamove
1823 * @opxcpttype 5
1824 */
1825FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1826{
1827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1828 if (IEM_IS_MODRM_MEM_MODE(bRm))
1829 {
1830 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1831
1832 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1833 IEM_MC_LOCAL(uint64_t, uSrc);
1834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1835
1836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1837 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1838 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1839 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1840
1841 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1842 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1843
1844 IEM_MC_ADVANCE_RIP_AND_FINISH();
1845 IEM_MC_END();
1846 }
1847
1848 /**
1849 * @opdone
1850 * @opmnemonic udvex0f17m3
1851 * @opcode 0x17
1852 * @opcodesub 11 mr/reg
1853 * @oppfx none
1854 * @opunused immediate
1855 * @opcpuid avx
1856 * @optest ->
1857 */
1858 else
1859 IEMOP_RAISE_INVALID_OPCODE_RET();
1860}
1861
1862
1863/**
1864 * @opcode 0x17
1865 * @opcodesub !11 mr/reg
1866 * @oppfx 0x66
1867 * @opcpuid avx
1868 * @opgroup og_avx_pcksclr_datamove
1869 * @opxcpttype 5
1870 */
1871FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1872{
1873 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1874 if (IEM_IS_MODRM_MEM_MODE(bRm))
1875 {
1876 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1877
1878 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1879 IEM_MC_LOCAL(uint64_t, uSrc);
1880 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1881
1882 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1883 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1884 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1885 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1886
1887 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1888 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1889
1890 IEM_MC_ADVANCE_RIP_AND_FINISH();
1891 IEM_MC_END();
1892 }
1893
1894 /**
1895 * @opdone
1896 * @opmnemonic udvex660f17m3
1897 * @opcode 0x17
1898 * @opcodesub 11 mr/reg
1899 * @oppfx 0x66
1900 * @opunused immediate
1901 * @opcpuid avx
1902 * @optest ->
1903 */
1904 else
1905 IEMOP_RAISE_INVALID_OPCODE_RET();
1906}
1907
1908
1909/* Opcode VEX.F3.0F 0x17 - invalid */
1910/* Opcode VEX.F2.0F 0x17 - invalid */
1911
1912
1913/* Opcode VEX.0F 0x18 - invalid */
1914/* Opcode VEX.0F 0x19 - invalid */
1915/* Opcode VEX.0F 0x1a - invalid */
1916/* Opcode VEX.0F 0x1b - invalid */
1917/* Opcode VEX.0F 0x1c - invalid */
1918/* Opcode VEX.0F 0x1d - invalid */
1919/* Opcode VEX.0F 0x1e - invalid */
1920/* Opcode VEX.0F 0x1f - invalid */
1921
1922/* Opcode VEX.0F 0x20 - invalid */
1923/* Opcode VEX.0F 0x21 - invalid */
1924/* Opcode VEX.0F 0x22 - invalid */
1925/* Opcode VEX.0F 0x23 - invalid */
1926/* Opcode VEX.0F 0x24 - invalid */
1927/* Opcode VEX.0F 0x25 - invalid */
1928/* Opcode VEX.0F 0x26 - invalid */
1929/* Opcode VEX.0F 0x27 - invalid */
1930
1931/**
1932 * @opcode 0x28
1933 * @oppfx none
1934 * @opcpuid avx
1935 * @opgroup og_avx_pcksclr_datamove
1936 * @opxcpttype 1
1937 * @optest op1=1 op2=2 -> op1=2
1938 * @optest op1=0 op2=-42 -> op1=-42
1939 * @note Almost identical to vmovapd.
1940 */
1941FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1942{
1943 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1944 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1945 Assert(pVCpu->iem.s.uVexLength <= 1);
1946 if (IEM_IS_MODRM_REG_MODE(bRm))
1947 {
1948 /*
1949 * Register, register.
1950 */
1951 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1952 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1953
1954 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1955 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1956 if (pVCpu->iem.s.uVexLength == 0)
1957 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1958 IEM_GET_MODRM_RM(pVCpu, bRm));
1959 else
1960 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1961 IEM_GET_MODRM_RM(pVCpu, bRm));
1962 IEM_MC_ADVANCE_RIP_AND_FINISH();
1963 IEM_MC_END();
1964 }
1965 else
1966 {
1967 /*
1968 * Register, memory.
1969 */
1970 if (pVCpu->iem.s.uVexLength == 0)
1971 {
1972 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1974 IEM_MC_LOCAL(RTUINT128U, uSrc);
1975
1976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1977 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1978 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1979 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1980
1981 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1982 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1983
1984 IEM_MC_ADVANCE_RIP_AND_FINISH();
1985 IEM_MC_END();
1986 }
1987 else
1988 {
1989 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1991 IEM_MC_LOCAL(RTUINT256U, uSrc);
1992
1993 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1994 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1995 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1996 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1997
1998 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1999 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2000
2001 IEM_MC_ADVANCE_RIP_AND_FINISH();
2002 IEM_MC_END();
2003 }
2004 }
2005}
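
// The _ALIGN_SSE / _ALIGN_AVX fetches above are what distinguish vmovaps from
// vmovups: the aligned forms fault on a misaligned effective address. A sketch of
// that check, assuming the usual natural-alignment rule (illustrative only; the
// real raising goes through the IEM exception machinery):
//
//     #include <stdint.h>
//
//     static int sketchIsAlignedAccess(uint64_t uGCPtr, unsigned cbOperand /* 16 for XMM, 32 for YMM */)
//     {
//         return (uGCPtr & (uint64_t)(cbOperand - 1)) == 0; /* false -> raise #GP(0) */
//     }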
2006
2007
2008/**
2009 * @opcode 0x28
2010 * @oppfx 0x66
2011 * @opcpuid avx
2012 * @opgroup og_avx_pcksclr_datamove
2013 * @opxcpttype 1
2014 * @optest op1=1 op2=2 -> op1=2
2015 * @optest op1=0 op2=-42 -> op1=-42
2016 * @note Almost identical to vmovaps.
2017 */
2018FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2019{
2020 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2021 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2022 Assert(pVCpu->iem.s.uVexLength <= 1);
2023 if (IEM_IS_MODRM_REG_MODE(bRm))
2024 {
2025 /*
2026 * Register, register.
2027 */
2028 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2029 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2030
2031 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2032 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2033 if (pVCpu->iem.s.uVexLength == 0)
2034 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2035 IEM_GET_MODRM_RM(pVCpu, bRm));
2036 else
2037 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2038 IEM_GET_MODRM_RM(pVCpu, bRm));
2039 IEM_MC_ADVANCE_RIP_AND_FINISH();
2040 IEM_MC_END();
2041 }
2042 else
2043 {
2044 /*
2045 * Register, memory.
2046 */
2047 if (pVCpu->iem.s.uVexLength == 0)
2048 {
2049 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2051 IEM_MC_LOCAL(RTUINT128U, uSrc);
2052
2053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2054 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2055 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2056 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2057
2058 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2059 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2060
2061 IEM_MC_ADVANCE_RIP_AND_FINISH();
2062 IEM_MC_END();
2063 }
2064 else
2065 {
2066 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2068 IEM_MC_LOCAL(RTUINT256U, uSrc);
2069
2070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2071 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2072 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2073 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2074
2075 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2076 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2077
2078 IEM_MC_ADVANCE_RIP_AND_FINISH();
2079 IEM_MC_END();
2080 }
2081 }
2082}
2083
2084/**
2085 * @opmnemonic udvexf30f28
2086 * @opcode 0x28
2087 * @oppfx 0xf3
2088 * @opunused vex.modrm
2089 * @opcpuid avx
2090 * @optest ->
2091 * @opdone
2092 */
2093
2094/**
2095 * @opmnemonic udvexf20f28
2096 * @opcode 0x28
2097 * @oppfx 0xf2
2098 * @opunused vex.modrm
2099 * @opcpuid avx
2100 * @optest ->
2101 * @opdone
2102 */
2103
2104/**
2105 * @opcode 0x29
2106 * @oppfx none
2107 * @opcpuid avx
2108 * @opgroup og_avx_pcksclr_datamove
2109 * @opxcpttype 1
2110 * @optest op1=1 op2=2 -> op1=2
2111 * @optest op1=0 op2=-42 -> op1=-42
2112 * @note Almost identical to vmovapd.
2113 */
2114FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
2115{
2116 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2117 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2118 Assert(pVCpu->iem.s.uVexLength <= 1);
2119 if (IEM_IS_MODRM_REG_MODE(bRm))
2120 {
2121 /*
2122 * Register, register.
2123 */
2124 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2125 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2126
2127 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2128 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2129 if (pVCpu->iem.s.uVexLength == 0)
2130 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2131 IEM_GET_MODRM_REG(pVCpu, bRm));
2132 else
2133 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2134 IEM_GET_MODRM_REG(pVCpu, bRm));
2135 IEM_MC_ADVANCE_RIP_AND_FINISH();
2136 IEM_MC_END();
2137 }
2138 else
2139 {
2140 /*
2141 * Register, memory.
2142 */
2143 if (pVCpu->iem.s.uVexLength == 0)
2144 {
2145 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2147 IEM_MC_LOCAL(RTUINT128U, uSrc);
2148
2149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2150 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2151 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2152 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2153
2154 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2155 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2156
2157 IEM_MC_ADVANCE_RIP_AND_FINISH();
2158 IEM_MC_END();
2159 }
2160 else
2161 {
2162 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2164 IEM_MC_LOCAL(RTUINT256U, uSrc);
2165
2166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2167 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2168 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2169 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2170
2171 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2172 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2173
2174 IEM_MC_ADVANCE_RIP_AND_FINISH();
2175 IEM_MC_END();
2176 }
2177 }
2178}
2179
2180/**
2181 * @opcode 0x29
2182 * @oppfx 0x66
2183 * @opcpuid avx
2184 * @opgroup og_avx_pcksclr_datamove
2185 * @opxcpttype 1
2186 * @optest op1=1 op2=2 -> op1=2
2187 * @optest op1=0 op2=-42 -> op1=-42
2188 * @note Almost identical to vmovaps.
2189 */
2190FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2191{
2192 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2193 Assert(pVCpu->iem.s.uVexLength <= 1);
2194 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2195 if (IEM_IS_MODRM_REG_MODE(bRm))
2196 {
2197 /*
2198 * Register, register.
2199 */
2200 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2201 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2202
2203 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2204 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2205 if (pVCpu->iem.s.uVexLength == 0)
2206 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2207 IEM_GET_MODRM_REG(pVCpu, bRm));
2208 else
2209 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2210 IEM_GET_MODRM_REG(pVCpu, bRm));
2211 IEM_MC_ADVANCE_RIP_AND_FINISH();
2212 IEM_MC_END();
2213 }
2214 else
2215 {
2216 /*
2217 * Register, memory.
2218 */
2219 if (pVCpu->iem.s.uVexLength == 0)
2220 {
2221 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2223 IEM_MC_LOCAL(RTUINT128U, uSrc);
2224
2225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2226 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2227 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2228 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2229
2230 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2231 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2232
2233 IEM_MC_ADVANCE_RIP_AND_FINISH();
2234 IEM_MC_END();
2235 }
2236 else
2237 {
2238 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2240 IEM_MC_LOCAL(RTUINT256U, uSrc);
2241
2242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2243 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2244 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2245 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2246
2247 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2248 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2249
2250 IEM_MC_ADVANCE_RIP_AND_FINISH();
2251 IEM_MC_END();
2252 }
2253 }
2254}
2255
2256
2257/**
2258 * @opmnemonic udvexf30f29
2259 * @opcode 0x29
2260 * @oppfx 0xf3
2261 * @opunused vex.modrm
2262 * @opcpuid avx
2263 * @optest ->
2264 * @opdone
2265 */
2266
2267/**
2268 * @opmnemonic udvexf20f29
2269 * @opcode 0x29
2270 * @oppfx 0xf2
2271 * @opunused vex.modrm
2272 * @opcpuid avx
2273 * @optest ->
2274 * @opdone
2275 */
2276
2277
2278/** Opcode VEX.0F 0x2a - invalid */
2279/** Opcode VEX.66.0F 0x2a - invalid */
2280/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2281FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2282/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2283FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2284
2285
2286/**
2287 * @opcode 0x2b
2288 * @opcodesub !11 mr/reg
2289 * @oppfx none
2290 * @opcpuid avx
2291 * @opgroup og_avx_cachect
2292 * @opxcpttype 1
2293 * @optest op1=1 op2=2 -> op1=2
2294 * @optest op1=0 op2=-42 -> op1=-42
2295 * @note Identical implementation to vmovntpd
2296 */
2297FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2298{
2299 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2300 Assert(pVCpu->iem.s.uVexLength <= 1);
2301 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2302 if (IEM_IS_MODRM_MEM_MODE(bRm))
2303 {
2304 /*
2305 * Memory, register.
2306 */
2307 if (pVCpu->iem.s.uVexLength == 0)
2308 {
2309 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2310 IEM_MC_LOCAL(RTUINT128U, uSrc);
2311 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2312
2313 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2314 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2315 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2316 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2317
2318 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2319 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2320
2321 IEM_MC_ADVANCE_RIP_AND_FINISH();
2322 IEM_MC_END();
2323 }
2324 else
2325 {
2326 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2327 IEM_MC_LOCAL(RTUINT256U, uSrc);
2328 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2329
2330 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2331 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2332 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2333 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2334
2335 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2336 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2337
2338 IEM_MC_ADVANCE_RIP_AND_FINISH();
2339 IEM_MC_END();
2340 }
2341 }
2342 /* The register, register encoding is invalid. */
2343 else
2344 IEMOP_RAISE_INVALID_OPCODE_RET();
2345}
2346
2347/**
2348 * @opcode 0x2b
2349 * @opcodesub !11 mr/reg
2350 * @oppfx 0x66
2351 * @opcpuid avx
2352 * @opgroup og_avx_cachect
2353 * @opxcpttype 1
2354 * @optest op1=1 op2=2 -> op1=2
2355 * @optest op1=0 op2=-42 -> op1=-42
2356 * @note Identical implementation to vmovntps
2357 */
2358FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2359{
2360 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2361 Assert(pVCpu->iem.s.uVexLength <= 1);
2362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2363 if (IEM_IS_MODRM_MEM_MODE(bRm))
2364 {
2365 /*
2366 * Memory, register.
2367 */
2368 if (pVCpu->iem.s.uVexLength == 0)
2369 {
2370 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2371 IEM_MC_LOCAL(RTUINT128U, uSrc);
2372 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2373
2374 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2375 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2376 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2377 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2378
2379 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2380 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2381
2382 IEM_MC_ADVANCE_RIP_AND_FINISH();
2383 IEM_MC_END();
2384 }
2385 else
2386 {
2387 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2388 IEM_MC_LOCAL(RTUINT256U, uSrc);
2389 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2390
2391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2392 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2393 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2394 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2395
2396 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2397 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2398
2399 IEM_MC_ADVANCE_RIP_AND_FINISH();
2400 IEM_MC_END();
2401 }
2402 }
2403 /* The register, register encoding is invalid. */
2404 else
2405 IEMOP_RAISE_INVALID_OPCODE_RET();
2406}
2407
2408/**
2409 * @opmnemonic udvexf30f2b
2410 * @opcode 0x2b
2411 * @oppfx 0xf3
2412 * @opunused vex.modrm
2413 * @opcpuid avx
2414 * @optest ->
2415 * @opdone
2416 */
2417
2418/**
2419 * @opmnemonic udvexf20f2b
2420 * @opcode 0x2b
2421 * @oppfx 0xf2
2422 * @opunused vex.modrm
2423 * @opcpuid avx
2424 * @optest ->
2425 * @opdone
2426 */
2427
2428
2429/* Opcode VEX.0F 0x2c - invalid */
2430/* Opcode VEX.66.0F 0x2c - invalid */
2431/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2432FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2433/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2434FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2435
2436/* Opcode VEX.0F 0x2d - invalid */
2437/* Opcode VEX.66.0F 0x2d - invalid */
2438/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2439FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2440/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2441FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2442
2443
2444/**
2445 * @opcode 0x2e
2446 * @oppfx none
2447 * @opflmodify cf,pf,af,zf,sf,of
2448 * @opflclear af,sf,of
2449 */
2450FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2451{
2452 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2454 if (IEM_IS_MODRM_REG_MODE(bRm))
2455 {
2456 /*
2457 * Register, register.
2458 */
2459 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2460 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2461 IEM_MC_LOCAL(uint32_t, fEFlags);
2462 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2463 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2464 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2465 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2466 IEM_MC_PREPARE_AVX_USAGE();
2467 IEM_MC_FETCH_EFLAGS(fEFlags);
2468 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2469 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
2470 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2471 pEFlags, uSrc1, uSrc2);
2472 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2473 IEM_MC_COMMIT_EFLAGS(fEFlags);
2474
2475 IEM_MC_ADVANCE_RIP_AND_FINISH();
2476 IEM_MC_END();
2477 }
2478 else
2479 {
2480 /*
2481 * Register, memory.
2482 */
2483 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2484 IEM_MC_LOCAL(uint32_t, fEFlags);
2485 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2486 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2487 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2489
2490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2491 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2492 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2493 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2494
2495 IEM_MC_PREPARE_AVX_USAGE();
2496 IEM_MC_FETCH_EFLAGS(fEFlags);
2497 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2498 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2499 pEFlags, uSrc1, uSrc2);
2500 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2501 IEM_MC_COMMIT_EFLAGS(fEFlags);
2502
2503 IEM_MC_ADVANCE_RIP_AND_FINISH();
2504 IEM_MC_END();
2505 }
2506}
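
// vucomiss reduces to an unordered scalar compare whose result lands entirely in
// EFLAGS (ZF/PF/CF set as below, AF/SF/OF cleared). A self-contained sketch of that
// flag mapping -- illustrative C, not the IEM assembly helper; vcomiss (0x2f below)
// computes the same result but also raises #IA on quiet NaN operands:
//
//     #include <math.h>
//
//     static unsigned sketchUComFlags(float r32Src1, float r32Src2)
//     {
//         if (isnan(r32Src1) || isnan(r32Src2))
//             return 0x45;   /* ZF=1 PF=1 CF=1: unordered  */
//         if (r32Src1 < r32Src2)
//             return 0x01;   /* CF=1: src1 below src2      */
//         if (r32Src1 == r32Src2)
//             return 0x40;   /* ZF=1: equal                */
//         return 0x00;       /* all clear: src1 above src2 */
//     }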
2507
2508
2509/**
2510 * @opcode 0x2e
2511 * @oppfx 0x66
2512 * @opflmodify cf,pf,af,zf,sf,of
2513 * @opflclear af,sf,of
2514 */
2515FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2516{
2517 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2518 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2519 if (IEM_IS_MODRM_REG_MODE(bRm))
2520 {
2521 /*
2522 * Register, register.
2523 */
2524 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2525 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2526 IEM_MC_LOCAL(uint32_t, fEFlags);
2527 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2528 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2529 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2530 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2531 IEM_MC_PREPARE_AVX_USAGE();
2532 IEM_MC_FETCH_EFLAGS(fEFlags);
2533 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2534 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
2535 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2536 pEFlags, uSrc1, uSrc2);
2537 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2538 IEM_MC_COMMIT_EFLAGS(fEFlags);
2539
2540 IEM_MC_ADVANCE_RIP_AND_FINISH();
2541 IEM_MC_END();
2542 }
2543 else
2544 {
2545 /*
2546 * Register, memory.
2547 */
2548 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2549 IEM_MC_LOCAL(uint32_t, fEFlags);
2550 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2551 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2552 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2554
2555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2556 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2557 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2558 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2559
2560 IEM_MC_PREPARE_AVX_USAGE();
2561 IEM_MC_FETCH_EFLAGS(fEFlags);
2562 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2563 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2564 pEFlags, uSrc1, uSrc2);
2565 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2566 IEM_MC_COMMIT_EFLAGS(fEFlags);
2567
2568 IEM_MC_ADVANCE_RIP_AND_FINISH();
2569 IEM_MC_END();
2570 }
2571}
2572
2573
2574/* Opcode VEX.F3.0F 0x2e - invalid */
2575/* Opcode VEX.F2.0F 0x2e - invalid */
2576
2577/**
2578 * @opcode 0x2f
2579 * @oppfx none
2580 * @opflmodify cf,pf,af,zf,sf,of
2581 * @opflclear af,sf,of
2582 */
2583FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2584{
2585 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2586 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2587 if (IEM_IS_MODRM_REG_MODE(bRm))
2588 {
2589 /*
2590 * Register, register.
2591 */
2592 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2593 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2594 IEM_MC_LOCAL(uint32_t, fEFlags);
2595 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2596 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2597 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2598 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2599 IEM_MC_PREPARE_AVX_USAGE();
2600 IEM_MC_FETCH_EFLAGS(fEFlags);
2601 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2602 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
2603 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2604 pEFlags, uSrc1, uSrc2);
2605 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2606 IEM_MC_COMMIT_EFLAGS(fEFlags);
2607
2608 IEM_MC_ADVANCE_RIP_AND_FINISH();
2609 IEM_MC_END();
2610 }
2611 else
2612 {
2613 /*
2614 * Register, memory.
2615 */
2616 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2617 IEM_MC_LOCAL(uint32_t, fEFlags);
2618 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2619 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2620 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2622
2623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2624 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2625 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2626 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2627
2628 IEM_MC_PREPARE_AVX_USAGE();
2629 IEM_MC_FETCH_EFLAGS(fEFlags);
2630 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2631 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2632 pEFlags, uSrc1, uSrc2);
2633 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2634 IEM_MC_COMMIT_EFLAGS(fEFlags);
2635
2636 IEM_MC_ADVANCE_RIP_AND_FINISH();
2637 IEM_MC_END();
2638 }
2639}
2640
2641
2642/**
2643 * @opcode 0x2f
2644 * @oppfx 0x66
2645 * @opflmodify cf,pf,af,zf,sf,of
2646 * @opflclear af,sf,of
2647 */
2648FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2649{
2650 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2651 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2652 if (IEM_IS_MODRM_REG_MODE(bRm))
2653 {
2654 /*
2655 * Register, register.
2656 */
2657 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2658 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2659 IEM_MC_LOCAL(uint32_t, fEFlags);
2660 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2661 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2662 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2663 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2664 IEM_MC_PREPARE_AVX_USAGE();
2665 IEM_MC_FETCH_EFLAGS(fEFlags);
2666 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2667 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
2668 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2669 pEFlags, uSrc1, uSrc2);
2670 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2671 IEM_MC_COMMIT_EFLAGS(fEFlags);
2672
2673 IEM_MC_ADVANCE_RIP_AND_FINISH();
2674 IEM_MC_END();
2675 }
2676 else
2677 {
2678 /*
2679 * Register, memory.
2680 */
2681 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2682 IEM_MC_LOCAL(uint32_t, fEFlags);
2683 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2684 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2685 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2687
2688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2689 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2690 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2691 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2692
2693 IEM_MC_PREPARE_AVX_USAGE();
2694 IEM_MC_FETCH_EFLAGS(fEFlags);
2695 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2696 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2697 pEFlags, uSrc1, uSrc2);
2698 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2699 IEM_MC_COMMIT_EFLAGS(fEFlags);
2700
2701 IEM_MC_ADVANCE_RIP_AND_FINISH();
2702 IEM_MC_END();
2703 }
2704}
2705
2706
2707/* Opcode VEX.F3.0F 0x2f - invalid */
2708/* Opcode VEX.F2.0F 0x2f - invalid */
2709
2710/* Opcode VEX.0F 0x30 - invalid */
2711/* Opcode VEX.0F 0x31 - invalid */
2712/* Opcode VEX.0F 0x32 - invalid */
2713/* Opcode VEX.0F 0x33 - invalid */
2714/* Opcode VEX.0F 0x34 - invalid */
2715/* Opcode VEX.0F 0x35 - invalid */
2716/* Opcode VEX.0F 0x36 - invalid */
2717/* Opcode VEX.0F 0x37 - invalid */
2718/* Opcode VEX.0F 0x38 - invalid */
2719/* Opcode VEX.0F 0x39 - invalid */
2720/* Opcode VEX.0F 0x3a - invalid */
2721/* Opcode VEX.0F 0x3b - invalid */
2722/* Opcode VEX.0F 0x3c - invalid */
2723/* Opcode VEX.0F 0x3d - invalid */
2724/* Opcode VEX.0F 0x3e - invalid */
2725/* Opcode VEX.0F 0x3f - invalid */
2726/* Opcode VEX.0F 0x40 - invalid */
2727/* Opcode VEX.0F 0x41 - invalid */
2728/* Opcode VEX.0F 0x42 - invalid */
2729/* Opcode VEX.0F 0x43 - invalid */
2730/* Opcode VEX.0F 0x44 - invalid */
2731/* Opcode VEX.0F 0x45 - invalid */
2732/* Opcode VEX.0F 0x46 - invalid */
2733/* Opcode VEX.0F 0x47 - invalid */
2734/* Opcode VEX.0F 0x48 - invalid */
2735/* Opcode VEX.0F 0x49 - invalid */
2736/* Opcode VEX.0F 0x4a - invalid */
2737/* Opcode VEX.0F 0x4b - invalid */
2738/* Opcode VEX.0F 0x4c - invalid */
2739/* Opcode VEX.0F 0x4d - invalid */
2740/* Opcode VEX.0F 0x4e - invalid */
2741/* Opcode VEX.0F 0x4f - invalid */
2742
2743
2744/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2745FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2746{
2747 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2749 if (IEM_IS_MODRM_REG_MODE(bRm))
2750 {
2751 /*
2752 * Register, register.
2753 */
2754 if (pVCpu->iem.s.uVexLength == 0)
2755 {
2756 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2757 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2758 IEM_MC_LOCAL(uint8_t, u8Dst);
2759 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2760 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2761 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2762 IEM_MC_PREPARE_AVX_USAGE();
2763 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2764 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2765 pu8Dst, puSrc);
2766 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2767 IEM_MC_ADVANCE_RIP_AND_FINISH();
2768 IEM_MC_END();
2769 }
2770 else
2771 {
2772 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2773 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2774 IEM_MC_LOCAL(uint8_t, u8Dst);
2775 IEM_MC_LOCAL(RTUINT256U, uSrc);
2776 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2777 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2778
2779 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2780 IEM_MC_PREPARE_AVX_USAGE();
2781 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2782 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2783 pu8Dst, puSrc);
2784 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2785 IEM_MC_ADVANCE_RIP_AND_FINISH();
2786 IEM_MC_END();
2787 }
2788 }
2789 /* No memory operand. */
2790 else
2791 IEMOP_RAISE_INVALID_OPCODE_RET();
2792}
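
// vmovmskps collects the sign bit of each packed single into the low bits of a
// general register. A minimal sketch of what the called assembly helper computes
// (names invented; the store above then zero-extends the result to 32 bits):
//
//     #include <stdint.h>
//
//     static uint8_t sketchMovMskPs(const uint32_t *pauSrc, unsigned cDWords /* 4 or 8 */)
//     {
//         uint8_t bMask = 0;
//         for (unsigned i = 0; i < cDWords; i++)
//             bMask |= (uint8_t)((pauSrc[i] >> 31) << i);
//         return bMask;
//     }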
2793
2794
2795/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2796FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2797{
2798 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2800 if (IEM_IS_MODRM_REG_MODE(bRm))
2801 {
2802 /*
2803 * Register, register.
2804 */
2805 if (pVCpu->iem.s.uVexLength == 0)
2806 {
2807 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2808 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2809 IEM_MC_LOCAL(uint8_t, u8Dst);
2810 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2811 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2812 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2813 IEM_MC_PREPARE_AVX_USAGE();
2814 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2815 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2816 pu8Dst, puSrc);
2817 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2818 IEM_MC_ADVANCE_RIP_AND_FINISH();
2819 IEM_MC_END();
2820 }
2821 else
2822 {
2823 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2824 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2825 IEM_MC_LOCAL(uint8_t, u8Dst);
2826 IEM_MC_LOCAL(RTUINT256U, uSrc);
2827 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2828 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2829
2830 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2831 IEM_MC_PREPARE_AVX_USAGE();
2832 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2833 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2834 pu8Dst, puSrc);
2835 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2836 IEM_MC_ADVANCE_RIP_AND_FINISH();
2837 IEM_MC_END();
2838 }
2839 }
2840 /* No memory operand. */
2841 else
2842 IEMOP_RAISE_INVALID_OPCODE_RET();
2843}
2844
2845
2846/* Opcode VEX.F3.0F 0x50 - invalid */
2847/* Opcode VEX.F2.0F 0x50 - invalid */
2848
2849/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2850FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2851/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2852FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2853/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2854FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2855/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2856FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2857
2858/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2859FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2860/* Opcode VEX.66.0F 0x52 - invalid */
2861/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2862FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2863/* Opcode VEX.F2.0F 0x52 - invalid */
2864
2865/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2866FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2867/* Opcode VEX.66.0F 0x53 - invalid */
2868/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2869FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2870/* Opcode VEX.F2.0F 0x53 - invalid */
2871
2872
2873/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2874FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2875{
2876 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2877 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2878 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2879}
2880
2881
2882/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2883FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2884{
2885 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2886 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2887 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2888}
2889
2890
2891/* Opcode VEX.F3.0F 0x54 - invalid */
2892/* Opcode VEX.F2.0F 0x54 - invalid */
2893
2894
2895/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2896FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2897{
2898 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2899 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2900 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2901}
2902
2903
2904/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2905FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2906{
2907 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2908 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2909 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2910}
2911
2912
2913/* Opcode VEX.F3.0F 0x55 - invalid */
2914/* Opcode VEX.F2.0F 0x55 - invalid */
2915
2916/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2917FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2918{
2919 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2920 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2921 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2922}
2923
2924
2925/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2926FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2927{
2928 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2929 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2930 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2931}
2932
2933
2934/* Opcode VEX.F3.0F 0x56 - invalid */
2935/* Opcode VEX.F2.0F 0x56 - invalid */
2936
2937
2938/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2939FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2940{
2941 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2942 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2943 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2944}
2945
2946
2947/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2948FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2949{
2950 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2951 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2952 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2953}
2954
2955
2956/* Opcode VEX.F3.0F 0x57 - invalid */
2957/* Opcode VEX.F2.0F 0x57 - invalid */
2958
2959
2960/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2961FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
2962{
2963 IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2964 IEMOPMEDIAF3_INIT_VARS( vaddps);
2965 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
2966}
2967
2968
2969/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2970FNIEMOP_DEF(iemOp_vaddpd_Vpd_Hpd_Wpd)
2971{
2972 IEMOP_MNEMONIC3(VEX_RVM, VADDPD, vaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2973 IEMOPMEDIAF3_INIT_VARS( vaddpd);
2974 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
2975}
2976
2977
2978/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2979FNIEMOP_DEF(iemOp_vaddss_Vss_Hss_Wss)
2980{
2981 IEMOP_MNEMONIC3(VEX_RVM, VADDSS, vaddss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2982 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
2983 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback));
2984}
2985
2986
2987/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2988FNIEMOP_DEF(iemOp_vaddsd_Vsd_Hsd_Wsd)
2989{
2990 IEMOP_MNEMONIC3(VEX_RVM, VADDSD, vaddsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2991 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
2992 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback));
2993}
2994
2995
2996/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2997FNIEMOP_DEF(iemOp_vmulps_Vps_Hps_Wps)
2998{
2999 IEMOP_MNEMONIC3(VEX_RVM, VMULPS, vmulps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3000 IEMOPMEDIAF3_INIT_VARS( vmulps);
3001 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3002}
3003
3004
3005/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
3006FNIEMOP_DEF(iemOp_vmulpd_Vpd_Hpd_Wpd)
3007{
3008 IEMOP_MNEMONIC3(VEX_RVM, VMULPD, vmulpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3009 IEMOPMEDIAF3_INIT_VARS( vmulpd);
3010 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3011}
3012
3013
3014/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
3015FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
3016/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
3017FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
3018
3019/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
3020FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
3021/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
3022FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
3023/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
3024FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
3025/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
3026FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
3027
3028/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
3029FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
3030/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
3031FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
3032/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
3033FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
3034/* Opcode VEX.F2.0F 0x5b - invalid */
3035
3036
3037/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
3038FNIEMOP_DEF(iemOp_vsubps_Vps_Hps_Wps)
3039{
3040 IEMOP_MNEMONIC3(VEX_RVM, VSUBPS, vsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3041 IEMOPMEDIAF3_INIT_VARS( vsubps);
3042 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3043}
3044
3045
3046/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
3047FNIEMOP_DEF(iemOp_vsubpd_Vpd_Hpd_Wpd)
3048{
3049 IEMOP_MNEMONIC3(VEX_RVM, VSUBPD, vsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3050 IEMOPMEDIAF3_INIT_VARS( vsubpd);
3051 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3052}
3053
3054
3055/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
3056FNIEMOP_DEF(iemOp_vsubss_Vss_Hss_Wss)
3057{
3058 IEMOP_MNEMONIC3(VEX_RVM, VSUBSS, vsubss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3059 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3060 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubss_u128_r32, iemAImpl_vsubss_u128_r32_fallback));
3061}
3062
3063
3064/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
3065FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
3066
3067/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
3068FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
3069/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
3070FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
3071/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
3072FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
3073/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
3074FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
3075
3076/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
3077FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
3078/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
3079FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
3080/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
3081FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
3082/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
3083FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
3084
3085/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
3086FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
3087/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
3088FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
3089/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
3090FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
3091/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
3092FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
3093
3094
3095/* Opcode VEX.0F 0x60 - invalid */
3096
3097
3098/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
3099FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
3100{
3101 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3102 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
3103 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3104}
3105
3106
3107/* Opcode VEX.F3.0F 0x60 - invalid */
3108
3109
3110/* Opcode VEX.0F 0x61 - invalid */
3111
3112
3113/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
3114FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
3115{
3116 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3117 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
3118 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3119}
3120
3121
3122/* Opcode VEX.F3.0F 0x61 - invalid */
3123
3124
3125/* Opcode VEX.0F 0x62 - invalid */
3126
3127/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
3128FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
3129{
3130 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3131 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
3132 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3133}
3134
3135
3136/* Opcode VEX.F3.0F 0x62 - invalid */
3137
3138
3139
3140/* Opcode VEX.0F 0x63 - invalid */
3141
3142
3143/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
3144FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
3145{
3146 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3147 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
3148 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3149}
3150
3151
3152/* Opcode VEX.F3.0F 0x63 - invalid */
3153
3154/* Opcode VEX.0F 0x64 - invalid */
3155
3156
3157/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
3158FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
3159{
3160 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3161 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtb);
3162 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3163}
3164
3165
3166/* Opcode VEX.F3.0F 0x64 - invalid */
3167
3168/* Opcode VEX.0F 0x65 - invalid */
3169
3170
3171/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
3172FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
3173{
3174 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3175 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtw);
3176 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3177}
3178
3179
3180/* Opcode VEX.F3.0F 0x65 - invalid */
3181
3182/* Opcode VEX.0F 0x66 - invalid */
3183
3184
3185/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3186FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3187{
3188 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3189 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtd);
3190 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3191}
3192
3193
3194/* Opcode VEX.F3.0F 0x66 - invalid */
3195
3196/* Opcode VEX.0F 0x67 - invalid */
3197
3198
3199/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
3200FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3201{
3202 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3203 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3204 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3205}
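
// vpackuswb narrows signed words to bytes with unsigned saturation, taking the low
// eight bytes of each 128-bit lane from the first source and the high eight from
// the second. A hedged per-lane sketch (invented helper names, plain C99):
//
//     #include <stdint.h>
//
//     static uint8_t sketchSatU8(int16_t i16)
//     {
//         return i16 < 0 ? 0 : i16 > 255 ? 255 : (uint8_t)i16;
//     }
//
//     static void sketchPackUsWbLane(uint8_t abDst[16], const int16_t ai16Src1[8], const int16_t ai16Src2[8])
//     {
//         for (unsigned i = 0; i < 8; i++)
//         {
//             abDst[i]     = sketchSatU8(ai16Src1[i]);
//             abDst[i + 8] = sketchSatU8(ai16Src2[i]);
//         }
//     }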
3206
3207
3208/* Opcode VEX.F3.0F 0x67 - invalid */
3209
3210
3211///**
3212// * Common worker for SSE2 instructions on the form:
3213// * pxxxx xmm1, xmm2/mem128
3214// *
3215// * The 2nd operand is the second half of a register, which in the memory case
3216// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3217// * where it may read the full 128 bits or only the upper 64 bits.
3218// *
3219// * Exceptions type 4.
3220// */
3221//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3222//{
3223// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3224// if (IEM_IS_MODRM_REG_MODE(bRm))
3225// {
3226// /*
3227// * Register, register.
3228// */
3229// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3230// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3231// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3232// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3233// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3234// IEM_MC_PREPARE_SSE_USAGE();
3235// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3236// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3237// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3238// IEM_MC_ADVANCE_RIP_AND_FINISH();
3239// IEM_MC_END();
3240// }
3241// else
3242// {
3243// /*
3244// * Register, memory.
3245// */
3246// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3247// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3248// IEM_MC_LOCAL(RTUINT128U, uSrc);
3249// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3250// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3251//
3252// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3253// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3254// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3255// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
3256//
3257// IEM_MC_PREPARE_SSE_USAGE();
3258// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3259// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3260//
3261// IEM_MC_ADVANCE_RIP_AND_FINISH();
3262// IEM_MC_END();
3263// }
3264// return VINF_SUCCESS;
3265//}
3266
3267
3268/* Opcode VEX.0F 0x68 - invalid */
3269
3270/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3271FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3272{
3273 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3274 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3275 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3276}
3277
3278
3279/* Opcode VEX.F3.0F 0x68 - invalid */
3280
3281
3282/* Opcode VEX.0F 0x69 - invalid */
3283
3284
3285/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3286FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3287{
3288 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3289 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3290 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3291}
3292
3293
3294/* Opcode VEX.F3.0F 0x69 - invalid */
3295
3296
3297/* Opcode VEX.0F 0x6a - invalid */
3298
3299
3300/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3301FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3302{
3303 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3304 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3305 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3306}
3307
3308
3309/* Opcode VEX.F3.0F 0x6a - invalid */
3310
3311
3312/* Opcode VEX.0F 0x6b - invalid */
3313
3314
3315/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3316FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3317{
3318 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3319 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3320 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3321}
3322
3323
3324/* Opcode VEX.F3.0F 0x6b - invalid */
3325
3326
3327/* Opcode VEX.0F 0x6c - invalid */
3328
3329
3330/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3331FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3332{
3333 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3334 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3335 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3336}
3337
3338
3339/* Opcode VEX.F3.0F 0x6c - invalid */
3340/* Opcode VEX.F2.0F 0x6c - invalid */
3341
3342
3343/* Opcode VEX.0F 0x6d - invalid */
3344
3345
3346/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
3347FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3348{
3349 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3350 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3351 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3352}
3353
3354
3355/* Opcode VEX.F3.0F 0x6d - invalid */
3356
3357
3358/* Opcode VEX.0F 0x6e - invalid */
3359
3360FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3361{
3362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3363 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3364 {
3365 /**
3366 * @opcode 0x6e
3367 * @opcodesub rex.w=1
3368 * @oppfx 0x66
3369 * @opcpuid avx
3370 * @opgroup og_avx_simdint_datamov
3371 * @opxcpttype 5
3372 * @optest 64-bit / op1=1 op2=2 -> op1=2
3373 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3374 */
3375 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3376 if (IEM_IS_MODRM_REG_MODE(bRm))
3377 {
3378 /* XMM, greg64 */
3379 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3380 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3381 IEM_MC_LOCAL(uint64_t, u64Tmp);
3382
3383 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3384 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3385
3386 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3387 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3388
3389 IEM_MC_ADVANCE_RIP_AND_FINISH();
3390 IEM_MC_END();
3391 }
3392 else
3393 {
3394 /* XMM, [mem64] */
3395 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3397 IEM_MC_LOCAL(uint64_t, u64Tmp);
3398
3399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3400 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3401 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3402 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3403
3404 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3405 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3406
3407 IEM_MC_ADVANCE_RIP_AND_FINISH();
3408 IEM_MC_END();
3409 }
3410 }
3411 else
3412 {
3413 /**
3414 * @opdone
3415 * @opcode 0x6e
3416 * @opcodesub rex.w=0
3417 * @oppfx 0x66
3418 * @opcpuid avx
3419 * @opgroup og_avx_simdint_datamov
3420 * @opxcpttype 5
3421 * @opfunction iemOp_vmovd_q_Vy_Ey
3422 * @optest op1=1 op2=2 -> op1=2
3423 * @optest op1=0 op2=-42 -> op1=-42
3424 */
3425 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3426 if (IEM_IS_MODRM_REG_MODE(bRm))
3427 {
3428 /* XMM, greg32 */
3429 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3430 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3431 IEM_MC_LOCAL(uint32_t, u32Tmp);
3432
3433 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3434 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3435
3436 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3437 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3438
3439 IEM_MC_ADVANCE_RIP_AND_FINISH();
3440 IEM_MC_END();
3441 }
3442 else
3443 {
3444 /* XMM, [mem32] */
3445 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3447 IEM_MC_LOCAL(uint32_t, u32Tmp);
3448
3449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3450 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3451 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3452 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3453
3454 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3455 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3456
3457 IEM_MC_ADVANCE_RIP_AND_FINISH();
3458 IEM_MC_END();
3459 }
3460 }
3461}
3462
3463
3464/* Opcode VEX.F3.0F 0x6e - invalid */
3465
3466
3467/* Opcode VEX.0F 0x6f - invalid */
3468
3469/**
3470 * @opcode 0x6f
3471 * @oppfx 0x66
3472 * @opcpuid avx
3473 * @opgroup og_avx_simdint_datamove
3474 * @opxcpttype 1
3475 * @optest op1=1 op2=2 -> op1=2
3476 * @optest op1=0 op2=-42 -> op1=-42
3477 */
3478FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3479{
3480 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3481 Assert(pVCpu->iem.s.uVexLength <= 1);
3482 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3483 if (IEM_IS_MODRM_REG_MODE(bRm))
3484 {
3485 /*
3486 * Register, register.
3487 */
3488 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3489 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3490
3491 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3492 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3493 if (pVCpu->iem.s.uVexLength == 0)
3494 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3495 IEM_GET_MODRM_RM(pVCpu, bRm));
3496 else
3497 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3498 IEM_GET_MODRM_RM(pVCpu, bRm));
3499 IEM_MC_ADVANCE_RIP_AND_FINISH();
3500 IEM_MC_END();
3501 }
3502 else if (pVCpu->iem.s.uVexLength == 0)
3503 {
3504 /*
3505 * Register, memory128.
3506 */
3507 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3508 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3510
3511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3512 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3513 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3514 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3515
3516 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3517 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3518
3519 IEM_MC_ADVANCE_RIP_AND_FINISH();
3520 IEM_MC_END();
3521 }
3522 else
3523 {
3524 /*
3525 * Register, memory256.
3526 */
3527 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3528 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3530
3531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3532 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3533 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3534 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3535
3536 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3537 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3538
3539 IEM_MC_ADVANCE_RIP_AND_FINISH();
3540 IEM_MC_END();
3541 }
3542}
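
/* Note: vmovdqa requires a naturally aligned memory operand (hence the
 *       _ALIGN_ fetchers above), while vmovdqu below tolerates any address
 *       (the _NO_AC variants). */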
3543
3544/**
3545 * @opcode 0x6f
3546 * @oppfx 0xf3
3547 * @opcpuid avx
3548 * @opgroup og_avx_simdint_datamove
3549 * @opxcpttype 4UA
3550 * @optest op1=1 op2=2 -> op1=2
3551 * @optest op1=0 op2=-42 -> op1=-42
3552 */
3553FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3554{
3555 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3556 Assert(pVCpu->iem.s.uVexLength <= 1);
3557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3558 if (IEM_IS_MODRM_REG_MODE(bRm))
3559 {
3560 /*
3561 * Register, register.
3562 */
3563 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3564 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3565
3566 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3567 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3568 if (pVCpu->iem.s.uVexLength == 0)
3569 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3570 IEM_GET_MODRM_RM(pVCpu, bRm));
3571 else
3572 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3573 IEM_GET_MODRM_RM(pVCpu, bRm));
3574 IEM_MC_ADVANCE_RIP_AND_FINISH();
3575 IEM_MC_END();
3576 }
3577 else if (pVCpu->iem.s.uVexLength == 0)
3578 {
3579 /*
3580 * Register, memory128.
3581 */
3582 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3583 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3585
3586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3587 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3588 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3589 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3590
3591 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3592 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3593
3594 IEM_MC_ADVANCE_RIP_AND_FINISH();
3595 IEM_MC_END();
3596 }
3597 else
3598 {
3599 /*
3600 * Register, memory256.
3601 */
3602 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3603 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3605
3606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3607 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3608 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3609 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3610
3611 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3612 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3613
3614 IEM_MC_ADVANCE_RIP_AND_FINISH();
3615 IEM_MC_END();
3616 }
3617}
3618
3619
3620/* Opcode VEX.0F 0x70 - invalid */
3621
3622
3623/**
3624 * Common worker for AVX/AVX2 instructions on the forms:
3625 * - vpxxx xmm0, xmm2/mem128, imm8
3626 * - vpxxx ymm0, ymm2/mem256, imm8
3627 *
3628 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3629 */
3630FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3631{
3632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3633 if (IEM_IS_MODRM_REG_MODE(bRm))
3634 {
3635 /*
3636 * Register, register.
3637 */
3638 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3639 if (pVCpu->iem.s.uVexLength)
3640 {
3641 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3642 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3643 IEM_MC_LOCAL(RTUINT256U, uDst);
3644 IEM_MC_LOCAL(RTUINT256U, uSrc);
3645 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3646 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3647 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3648 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3649 IEM_MC_PREPARE_AVX_USAGE();
3650 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3651 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3652 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3653 IEM_MC_ADVANCE_RIP_AND_FINISH();
3654 IEM_MC_END();
3655 }
3656 else
3657 {
3658 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3659 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3660 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3661 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3662 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3663 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3664 IEM_MC_PREPARE_AVX_USAGE();
3665 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3666 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3667 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3668 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3669 IEM_MC_ADVANCE_RIP_AND_FINISH();
3670 IEM_MC_END();
3671 }
3672 }
3673 else
3674 {
3675 /*
3676 * Register, memory.
3677 */
3678 if (pVCpu->iem.s.uVexLength)
3679 {
3680 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3681 IEM_MC_LOCAL(RTUINT256U, uDst);
3682 IEM_MC_LOCAL(RTUINT256U, uSrc);
3683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3684 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3685 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3686
3687 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3688 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3689 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3690 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3691 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3692 IEM_MC_PREPARE_AVX_USAGE();
3693
3694 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3695 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3696 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3697
3698 IEM_MC_ADVANCE_RIP_AND_FINISH();
3699 IEM_MC_END();
3700 }
3701 else
3702 {
3703 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3704 IEM_MC_LOCAL(RTUINT128U, uSrc);
3705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3706 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3707 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3708
3709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3710 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3711 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3712 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3713 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3714 IEM_MC_PREPARE_AVX_USAGE();
3715
3716 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3717 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3718 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3719 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3720
3721 IEM_MC_ADVANCE_RIP_AND_FINISH();
3722 IEM_MC_END();
3723 }
3724 }
3725}
3726
3727
3728/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3729FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3730{
3731 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3732 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3733 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3735}
3736
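/* For reference, vpshufd selects doublewords by immediate:
 *      dst.dw[i] = src.dw[(bImm >> (i * 2)) & 3]   (per 128-bit lane),
 * so e.g. vpshufd xmm0, xmm1, 0x1b reverses the four doublewords. */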
3737
3738/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3739FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3740{
3741 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3742 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3743 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3745}
3746
3747
3748/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3749FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3750{
3751 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3752 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3753 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3754}
3755
3756
3757/**
3758 * Common worker(s) for AVX/AVX2 instructions on the forms:
3759 * - vpxxx xmm0, xmm2, imm8
3760 * - vpxxx ymm0, ymm2, imm8
3761 *
3762 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3763 */
3764FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
3765{
3766 if (IEM_IS_MODRM_REG_MODE(bRm))
3767 {
3768 /*
3769 * Register, register.
3770 */
3771 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3772 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3773 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3774 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3775 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3776 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3777 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3778 IEM_MC_PREPARE_AVX_USAGE();
3779 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
3780 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3781 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3782 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
3783 IEM_MC_ADVANCE_RIP_AND_FINISH();
3784 IEM_MC_END();
3785 }
3786 /* No memory operand. */
3787 else
3788 IEMOP_RAISE_INVALID_OPCODE_RET();
3789}
3790
3791FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3792{
3793 if (IEM_IS_MODRM_REG_MODE(bRm))
3794 {
3795 /*
3796 * Register, register.
3797 */
3798 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3799 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3800 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3801 IEM_MC_LOCAL(RTUINT256U, uDst);
3802 IEM_MC_LOCAL(RTUINT256U, uSrc);
3803 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3804 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3805 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3806 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3807 IEM_MC_PREPARE_AVX_USAGE();
3808 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3809 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3810 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
3811 IEM_MC_ADVANCE_RIP_AND_FINISH();
3812 IEM_MC_END();
3813 }
3814 /* No memory operand. */
3815 else
3816 IEMOP_RAISE_INVALID_OPCODE_RET();
3817}
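
/* Note: unlike most VEX encodings, these immediate shift forms use VEX.vvvv
 *       to select the destination register (Hx) and ModRM.rm the source
 *       (Ux), hence the IEM_GET_EFFECTIVE_VVVV stores above. */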
3818
3819
3820/* Opcode VEX.0F 0x71 11/2 - invalid. */
3821/** Opcode VEX.66.0F 0x71 11/2. */
3822FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
3823{
3824 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3825 if (pVCpu->iem.s.uVexLength)
3826 {
3827 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3828 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
3829 }
3830 else
3831 {
3832 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3833 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
3834 }
3835}
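
/* For reference: the immediate form of vpsrlw shifts each word element
 * right by bImm bits; counts above 15 zero the destination elements. */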
3836
3837
3838/* Opcode VEX.0F 0x71 11/4 - invalid */
3839/** Opcode VEX.66.0F 0x71 11/4. */
3840FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
3841{
3842 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3843 if (pVCpu->iem.s.uVexLength)
3844 {
3845 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3846 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
3847 }
3848 else
3849 {
3850 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3851 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
3852 }
3853}
3854
3855/* Opcode VEX.0F 0x71 11/6 - invalid */
3856
3857/** Opcode VEX.66.0F 0x71 11/6. */
3858FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
3859{
3860 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3861 if (pVCpu->iem.s.uVexLength)
3862 {
3863 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3864 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
3865 }
3866 else
3867 {
3868 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3869 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
3870 }
3871}
3872
3873
3874/**
3875 * VEX Group 12 jump table for register variant.
3876 */
3877IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
3878{ /* pfx: none, 066h, 0f3h, 0f2h */
3879 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3880 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3881 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3882 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3883 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3884 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3885 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3886 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3887};
3888AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
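
/* These VEX group tables are indexed by ModRM.reg * 4 + idxPrefix, where
 * idxPrefix is 0 for no prefix, 1 for 066h, 2 for 0f3h and 3 for 0f2h; e.g.
 * the 066h-prefixed /2 encoding (vpsrlw) dispatches to entry 2*4 + 1. */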
3889
3890
3891/** Opcode VEX.0F 0x71. */
3892FNIEMOP_DEF(iemOp_VGrp12)
3893{
3894 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3895 if (IEM_IS_MODRM_REG_MODE(bRm))
3896 /* register, register */
3897 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3898 + pVCpu->iem.s.idxPrefix], bRm);
3899 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3900}
3901
3902
3903/* Opcode VEX.0F 0x72 11/2 - invalid. */
3904/** Opcode VEX.66.0F 0x72 11/2. */
3905FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
3906{
3907 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3908 if (pVCpu->iem.s.uVexLength)
3909 {
3910 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3911 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
3912 }
3913 else
3914 {
3915 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3916 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
3917 }
3918}
3919
3920
3921/* Opcode VEX.0F 0x72 11/4 - invalid. */
3922/** Opcode VEX.66.0F 0x72 11/4. */
3923FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
3924{
3925 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3926 if (pVCpu->iem.s.uVexLength)
3927 {
3928 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3929 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
3930 }
3931 else
3932 {
3933 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3934 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
3935 }
3936}
3937
3938/* Opcode VEX.0F 0x72 11/6 - invalid. */
3939
3940/** Opcode VEX.66.0F 0x72 11/6. */
3941FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
3942{
3943 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3944 if (pVCpu->iem.s.uVexLength)
3945 {
3946 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3947 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
3948 }
3949 else
3950 {
3951 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3952 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
3953 }
3954}
3955
3956
3957/**
3958 * VEX Group 13 jump table for register variant.
3959 */
3960IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
3961{ /* pfx: none, 066h, 0f3h, 0f2h */
3962 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3963 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3964 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3965 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3966 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3967 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3968 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3969 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3970};
3971AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3972
3973/** Opcode VEX.0F 0x72. */
3974FNIEMOP_DEF(iemOp_VGrp13)
3975{
3976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3977 if (IEM_IS_MODRM_REG_MODE(bRm))
3978 /* register, register */
3979 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3980 + pVCpu->iem.s.idxPrefix], bRm);
3981 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3982}
3983
3984
3985/* Opcode VEX.0F 0x73 11/2 - invalid. */
3986/** Opcode VEX.66.0F 0x73 11/2. */
3987FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
3988{
3989 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3990 if (pVCpu->iem.s.uVexLength)
3991 {
3992 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3993 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
3994 }
3995 else
3996 {
3997 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3998 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
3999 }
4000}
4001
4002
4003/** Opcode VEX.66.0F 0x73 11/3. */
4004FNIEMOP_DEF_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm)
4005{
4006 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLDQ, vpsrldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4007 if (pVCpu->iem.s.uVexLength)
4008 {
4009 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4010 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u256, iemAImpl_vpsrldq_imm_u256_fallback));
4011 }
4012 else
4013 {
4014 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4015 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u128, iemAImpl_vpsrldq_imm_u128_fallback));
4016 }
4017}
4018
4019/* Opcode VEX.0F 0x73 11/6 - invalid. */
4020
4021/** Opcode VEX.66.0F 0x73 11/6. */
4022FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
4023{
4024 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4025 if (pVCpu->iem.s.uVexLength)
4026 {
4027 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4028 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
4029 }
4030 else
4031 {
4032 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4033 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
4034 }
4035}
4036
4037/** Opcode VEX.66.0F 0x73 11/7. */
4038FNIEMOP_DEF_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm)
4039{
4040 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLDQ, vpslldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4041 if (pVCpu->iem.s.uVexLength)
4042 {
4043 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4044 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u256, iemAImpl_vpslldq_imm_u256_fallback));
4045 }
4046 else
4047 {
4048 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4049 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u128, iemAImpl_vpslldq_imm_u128_fallback));
4050 }
4051}
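
/* For reference: vpslldq/vpsrldq shift by whole bytes, each 128-bit lane
 * independently, and an immediate above 15 zeroes the lane. */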
4052
4053/* Opcode VEX.0F 0x73 11/7 - invalid. */
4054
4055/**
4056 * VEX Group 14 jump table for register variant.
4057 */
4058IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
4059{ /* pfx: none, 066h, 0f3h, 0f2h */
4060 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4061 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4062 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4063 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4064 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4065 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4066 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4067 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4068};
4069AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
4070
4071
4072/** Opcode VEX.0F 0x73. */
4073FNIEMOP_DEF(iemOp_VGrp14)
4074{
4075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4076 if (IEM_IS_MODRM_REG_MODE(bRm))
4077 /* register, register */
4078 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4079 + pVCpu->iem.s.idxPrefix], bRm);
4080 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4081}
4082
4083
4084/* Opcode VEX.0F 0x74 - invalid */
4085
4086
4087/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
4088FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
4089{
4090 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4091 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqb);
4092 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4093}
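
/* Note: the vpcmpeq* family yields all-ones elements (0xff) where the
 *       inputs are equal and all-zero elements where they differ. */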
4094
4095/* Opcode VEX.F3.0F 0x74 - invalid */
4096/* Opcode VEX.F2.0F 0x74 - invalid */
4097
4098
4099/* Opcode VEX.0F 0x75 - invalid */
4100
4101
4102/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
4103FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
4104{
4105 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4106 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqw);
4107 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4108}
4109
4110
4111/* Opcode VEX.F3.0F 0x75 - invalid */
4112/* Opcode VEX.F2.0F 0x75 - invalid */
4113
4114
4115/* Opcode VEX.0F 0x76 - invalid */
4116
4117
4118/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
4119FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
4120{
4121 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4122 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqd);
4123 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4124}
4125
4126
4127/* Opcode VEX.F3.0F 0x76 - invalid */
4128/* Opcode VEX.F2.0F 0x76 - invalid */
4129
4130
4131/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
4132FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
4133{
4134 Assert(pVCpu->iem.s.uVexLength <= 1);
4135 if (pVCpu->iem.s.uVexLength == 0)
4136 {
4137 /*
4138 * 128-bit: vzeroupper
4139 */
4140 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
4141 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4142
4143 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4144 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4145 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4146
4147 IEM_MC_CLEAR_YREG_128_UP(0);
4148 IEM_MC_CLEAR_YREG_128_UP(1);
4149 IEM_MC_CLEAR_YREG_128_UP(2);
4150 IEM_MC_CLEAR_YREG_128_UP(3);
4151 IEM_MC_CLEAR_YREG_128_UP(4);
4152 IEM_MC_CLEAR_YREG_128_UP(5);
4153 IEM_MC_CLEAR_YREG_128_UP(6);
4154 IEM_MC_CLEAR_YREG_128_UP(7);
4155
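        /* The high registers (YMM8 thru YMM15) only exist in 64-bit mode. */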
4156 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4157 {
4158 IEM_MC_CLEAR_YREG_128_UP( 8);
4159 IEM_MC_CLEAR_YREG_128_UP( 9);
4160 IEM_MC_CLEAR_YREG_128_UP(10);
4161 IEM_MC_CLEAR_YREG_128_UP(11);
4162 IEM_MC_CLEAR_YREG_128_UP(12);
4163 IEM_MC_CLEAR_YREG_128_UP(13);
4164 IEM_MC_CLEAR_YREG_128_UP(14);
4165 IEM_MC_CLEAR_YREG_128_UP(15);
4166 }
4167
4168 IEM_MC_ADVANCE_RIP_AND_FINISH();
4169 IEM_MC_END();
4170 }
4171 else
4172 {
4173 /*
4174 * 256-bit: vzeroall
4175 */
4176 IEMOP_MNEMONIC(vzeroall, "vzeroall");
4177 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4178
4179 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4180 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4181 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4182
4183 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
4184 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
4185 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
4186 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
4187 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
4188 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
4189 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
4190 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
4191 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
4192
4193 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4194 {
4195 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
4196 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
4197 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
4198 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
4199 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
4200 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
4201 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
4202 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
4203 }
4204
4205 IEM_MC_ADVANCE_RIP_AND_FINISH();
4206 IEM_MC_END();
4207 }
4208}
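
/* Note: vzeroupper only clears bits 255:128 of each register, whereas
 *       vzeroall clears the full registers (done above by storing a zero
 *       dword with VLMAX zero extension). */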
4209
4210
4211/* Opcode VEX.66.0F 0x77 - invalid */
4212/* Opcode VEX.F3.0F 0x77 - invalid */
4213/* Opcode VEX.F2.0F 0x77 - invalid */
4214
4215/* Opcode VEX.0F 0x78 - invalid */
4216/* Opcode VEX.66.0F 0x78 - invalid */
4217/* Opcode VEX.F3.0F 0x78 - invalid */
4218/* Opcode VEX.F2.0F 0x78 - invalid */
4219
4220/* Opcode VEX.0F 0x79 - invalid */
4221/* Opcode VEX.66.0F 0x79 - invalid */
4222/* Opcode VEX.F3.0F 0x79 - invalid */
4223/* Opcode VEX.F2.0F 0x79 - invalid */
4224
4225/* Opcode VEX.0F 0x7a - invalid */
4226/* Opcode VEX.66.0F 0x7a - invalid */
4227/* Opcode VEX.F3.0F 0x7a - invalid */
4228/* Opcode VEX.F2.0F 0x7a - invalid */
4229
4230/* Opcode VEX.0F 0x7b - invalid */
4231/* Opcode VEX.66.0F 0x7b - invalid */
4232/* Opcode VEX.F3.0F 0x7b - invalid */
4233/* Opcode VEX.F2.0F 0x7b - invalid */
4234
4235/* Opcode VEX.0F 0x7c - invalid */
4236/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
4237FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
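
/* Not implemented yet. A sketch of how the wiring will probably look once
 * iemAImpl_vhaddpd_u128/u256 worker functions exist (hypothetical names,
 * modelled on the live vhaddps implementation below):
 */
//FNIEMOP_DEF(iemOp_vhaddpd_Vpd_Hpd_Wpd)
//{
//    IEMOP_MNEMONIC3(VEX_RVM, VHADDPD, vhaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
//    IEMOPMEDIAF3_INIT_VARS( vhaddpd);
//    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
//}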
4238/* Opcode VEX.F3.0F 0x7c - invalid */
4239
4240
4241/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
4242FNIEMOP_DEF(iemOp_vhaddps_Vps_Hps_Wps)
4243{
4244 IEMOP_MNEMONIC3(VEX_RVM, VHADDPS, vhaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4245 IEMOPMEDIAF3_INIT_VARS( vhaddps);
4246 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4247}
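
/* For reference: haddps adds horizontally adjacent pairs; the low two result
 * elements are src1[1]+src1[0] and src1[3]+src1[2], the high two come from
 * src2 likewise, and the 256-bit form repeats this per 128-bit lane. */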
4248
4249
4250/* Opcode VEX.0F 0x7d - invalid */
4251/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
4252FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
4253/* Opcode VEX.F3.0F 0x7d - invalid */
4254/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
4255FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
4256
4257
4258/* Opcode VEX.0F 0x7e - invalid */
4259
4260FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
4261{
4262 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4263 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4264 {
4265 /**
4266 * @opcode 0x7e
4267 * @opcodesub rex.w=1
4268 * @oppfx 0x66
4269 * @opcpuid avx
4270 * @opgroup og_avx_simdint_datamov
4271 * @opxcpttype 5
4272 * @optest 64-bit / op1=1 op2=2 -> op1=2
4273 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4274 */
4275 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4276 if (IEM_IS_MODRM_REG_MODE(bRm))
4277 {
4278 /* greg64, XMM */
4279 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4280 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4281 IEM_MC_LOCAL(uint64_t, u64Tmp);
4282
4283 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4284 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4285
4286 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4287 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
4288
4289 IEM_MC_ADVANCE_RIP_AND_FINISH();
4290 IEM_MC_END();
4291 }
4292 else
4293 {
4294 /* [mem64], XMM */
4295 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4297 IEM_MC_LOCAL(uint64_t, u64Tmp);
4298
4299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4300 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4301 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4302 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4303
4304 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4305 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4306
4307 IEM_MC_ADVANCE_RIP_AND_FINISH();
4308 IEM_MC_END();
4309 }
4310 }
4311 else
4312 {
4313 /**
4314 * @opdone
4315 * @opcode 0x7e
4316 * @opcodesub rex.w=0
4317 * @oppfx 0x66
4318 * @opcpuid avx
4319 * @opgroup og_avx_simdint_datamov
4320 * @opxcpttype 5
4321 * @opfunction iemOp_vmovd_q_Ey_Vy
4322 * @optest op1=1 op2=2 -> op1=2
4323 * @optest op1=0 op2=-42 -> op1=-42
4324 */
4325 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4326 if (IEM_IS_MODRM_REG_MODE(bRm))
4327 {
4328 /* greg32, XMM */
4329 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4330 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4331 IEM_MC_LOCAL(uint32_t, u32Tmp);
4332
4333 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4334 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4335
4336 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4337 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
4338
4339 IEM_MC_ADVANCE_RIP_AND_FINISH();
4340 IEM_MC_END();
4341 }
4342 else
4343 {
4344 /* [mem32], XMM */
4345 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4347 IEM_MC_LOCAL(uint32_t, u32Tmp);
4348
4349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4350 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4351 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4352 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4353
4354 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4355 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4356
4357 IEM_MC_ADVANCE_RIP_AND_FINISH();
4358 IEM_MC_END();
4359 }
4360 }
4361}
4362
4363
4364/**
4365 * @opcode 0x7e
4366 * @oppfx 0xf3
4367 * @opcpuid avx
4368 * @opgroup og_avx_pcksclr_datamove
4369 * @opxcpttype none
4370 * @optest op1=1 op2=2 -> op1=2
4371 * @optest op1=0 op2=-42 -> op1=-42
4372 */
4373FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
4374{
4375 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4376 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4377 if (IEM_IS_MODRM_REG_MODE(bRm))
4378 {
4379 /*
4380 * Register, register.
4381 */
4382 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4383 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4384
4385 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4386 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4387
4388 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4389 IEM_GET_MODRM_RM(pVCpu, bRm));
4390 IEM_MC_ADVANCE_RIP_AND_FINISH();
4391 IEM_MC_END();
4392 }
4393 else
4394 {
4395 /*
4396 * Memory, register.
4397 */
4398 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4399 IEM_MC_LOCAL(uint64_t, uSrc);
4400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4401
4402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4403 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4404 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4405 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4406
4407 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4408 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
4409
4410 IEM_MC_ADVANCE_RIP_AND_FINISH();
4411 IEM_MC_END();
4412 }
4413}
4414
4415/* Opcode VEX.F2.0F 0x7e - invalid */
4416
4417
4418/* Opcode VEX.0F 0x7f - invalid */
4419
4420/**
4421 * @opcode 0x7f
4422 * @oppfx 0x66
4423 * @opcpuid avx
4424 * @opgroup og_avx_simdint_datamove
4425 * @opxcpttype 1
4426 * @optest op1=1 op2=2 -> op1=2
4427 * @optest op1=0 op2=-42 -> op1=-42
4428 */
4429FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4430{
4431 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4432 Assert(pVCpu->iem.s.uVexLength <= 1);
4433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4434 if (IEM_IS_MODRM_REG_MODE(bRm))
4435 {
4436 /*
4437 * Register, register.
4438 */
4439 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4440 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4441
4442 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4443 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4444 if (pVCpu->iem.s.uVexLength == 0)
4445 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4446 IEM_GET_MODRM_REG(pVCpu, bRm));
4447 else
4448 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4449 IEM_GET_MODRM_REG(pVCpu, bRm));
4450 IEM_MC_ADVANCE_RIP_AND_FINISH();
4451 IEM_MC_END();
4452 }
4453 else if (pVCpu->iem.s.uVexLength == 0)
4454 {
4455 /*
4456 * Register, memory128.
4457 */
4458 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4459 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4461
4462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4463 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4464 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4465 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4466
4467 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4468 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4469
4470 IEM_MC_ADVANCE_RIP_AND_FINISH();
4471 IEM_MC_END();
4472 }
4473 else
4474 {
4475 /*
4476 * Register, memory256.
4477 */
4478 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4479 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4481
4482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4483 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4484 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4485 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4486
4487 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4488 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4489
4490 IEM_MC_ADVANCE_RIP_AND_FINISH();
4491 IEM_MC_END();
4492 }
4493}
4494
4495
4496/**
4497 * @opcode 0x7f
4498 * @oppfx 0xf3
4499 * @opcpuid avx
4500 * @opgroup og_avx_simdint_datamove
4501 * @opxcpttype 4UA
4502 * @optest op1=1 op2=2 -> op1=2
4503 * @optest op1=0 op2=-42 -> op1=-42
4504 */
4505FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4506{
4507 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4508 Assert(pVCpu->iem.s.uVexLength <= 1);
4509 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4510 if (IEM_IS_MODRM_REG_MODE(bRm))
4511 {
4512 /*
4513 * Register, register.
4514 */
4515 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4516 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4517
4518 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4519 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4520 if (pVCpu->iem.s.uVexLength == 0)
4521 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4522 IEM_GET_MODRM_REG(pVCpu, bRm));
4523 else
4524 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4525 IEM_GET_MODRM_REG(pVCpu, bRm));
4526 IEM_MC_ADVANCE_RIP_AND_FINISH();
4527 IEM_MC_END();
4528 }
4529 else if (pVCpu->iem.s.uVexLength == 0)
4530 {
4531 /*
4532 * Register, memory128.
4533 */
4534 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4535 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4537
4538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4539 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4540 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4541 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4542
4543 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4544 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4545
4546 IEM_MC_ADVANCE_RIP_AND_FINISH();
4547 IEM_MC_END();
4548 }
4549 else
4550 {
4551 /*
4552 * Register, memory256.
4553 */
4554 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4555 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4557
4558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4559 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4560 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4561 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4562
4563 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4564 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4565
4566 IEM_MC_ADVANCE_RIP_AND_FINISH();
4567 IEM_MC_END();
4568 }
4569}
4570
4571/* Opcode VEX.F2.0F 0x7f - invalid */
4572
4573
4574/* Opcode VEX.0F 0x80 - invalid */
4575/* Opcode VEX.0F 0x81 - invalid */
4576/* Opcode VEX.0F 0x82 - invalid */
4577/* Opcode VEX.0F 0x83 - invalid */
4578/* Opcode VEX.0F 0x84 - invalid */
4579/* Opcode VEX.0F 0x85 - invalid */
4580/* Opcode VEX.0F 0x86 - invalid */
4581/* Opcode VEX.0F 0x87 - invalid */
4582/* Opcode VEX.0F 0x88 - invalid */
4583/* Opcode VEX.0F 0x89 - invalid */
4584/* Opcode VEX.0F 0x8a - invalid */
4585/* Opcode VEX.0F 0x8b - invalid */
4586/* Opcode VEX.0F 0x8c - invalid */
4587/* Opcode VEX.0F 0x8d - invalid */
4588/* Opcode VEX.0F 0x8e - invalid */
4589/* Opcode VEX.0F 0x8f - invalid */
4590/* Opcode VEX.0F 0x90 - invalid */
4591/* Opcode VEX.0F 0x91 - invalid */
4592/* Opcode VEX.0F 0x92 - invalid */
4593/* Opcode VEX.0F 0x93 - invalid */
4594/* Opcode VEX.0F 0x94 - invalid */
4595/* Opcode VEX.0F 0x95 - invalid */
4596/* Opcode VEX.0F 0x96 - invalid */
4597/* Opcode VEX.0F 0x97 - invalid */
4598/* Opcode VEX.0F 0x98 - invalid */
4599/* Opcode VEX.0F 0x99 - invalid */
4600/* Opcode VEX.0F 0x9a - invalid */
4601/* Opcode VEX.0F 0x9b - invalid */
4602/* Opcode VEX.0F 0x9c - invalid */
4603/* Opcode VEX.0F 0x9d - invalid */
4604/* Opcode VEX.0F 0x9e - invalid */
4605/* Opcode VEX.0F 0x9f - invalid */
4606/* Opcode VEX.0F 0xa0 - invalid */
4607/* Opcode VEX.0F 0xa1 - invalid */
4608/* Opcode VEX.0F 0xa2 - invalid */
4609/* Opcode VEX.0F 0xa3 - invalid */
4610/* Opcode VEX.0F 0xa4 - invalid */
4611/* Opcode VEX.0F 0xa5 - invalid */
4612/* Opcode VEX.0F 0xa6 - invalid */
4613/* Opcode VEX.0F 0xa7 - invalid */
4614/* Opcode VEX.0F 0xa8 - invalid */
4615/* Opcode VEX.0F 0xa9 - invalid */
4616/* Opcode VEX.0F 0xaa - invalid */
4617/* Opcode VEX.0F 0xab - invalid */
4618/* Opcode VEX.0F 0xac - invalid */
4619/* Opcode VEX.0F 0xad - invalid */
4620
4621
4622/* Opcode VEX.0F 0xae mem/0 - invalid. */
4623/* Opcode VEX.0F 0xae mem/1 - invalid. */
4624
4625/**
4626 * @ opmaps grp15
4627 * @ opcode !11/2
4628 * @ oppfx none
4629 * @ opcpuid sse
4630 * @ opgroup og_sse_mxcsrsm
4631 * @ opxcpttype 5
4632 * @ optest op1=0 -> mxcsr=0
4633 * @ optest op1=0x2083 -> mxcsr=0x2083
4634 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4635 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4636 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4637 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4638 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4639 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4640 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4641 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4642 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4643 */
4644FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4645//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4646//{
4647// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4648// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4649// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4650// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4651// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4652// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4653// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4654//    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4655//    IEM_MC_END();
4657//}
4658
4659
4660/**
4661 * @opmaps vexgrp15
4662 * @opcode !11/3
4663 * @oppfx none
4664 * @opcpuid avx
4665 * @opgroup og_avx_mxcsrsm
4666 * @opxcpttype 5
4667 * @optest mxcsr=0 -> op1=0
4668 * @optest mxcsr=0x2083 -> op1=0x2083
4669 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4670 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4671 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4672 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4673 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4674 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4675 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4676 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4677 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4678 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4679 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4680 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4681 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4682 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4683 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4684 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4685 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4686 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4687 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4688 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4689 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4690 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4691 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4692 * -> value.xcpt=0x6
4693 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4694 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4695 * APMv4 rev 3.17 page 509.
4696 * @todo Test this instruction on AMD Ryzen.
4697 */
4698FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4699{
4700 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4701 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4702 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4704 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4705 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4706 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4707 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4708 IEM_MC_END();
4709}
4710
4711/* Opcode VEX.0F 0xae mem/4 - invalid. */
4712/* Opcode VEX.0F 0xae mem/5 - invalid. */
4713/* Opcode VEX.0F 0xae mem/6 - invalid. */
4714/* Opcode VEX.0F 0xae mem/7 - invalid. */
4715
4716/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4717/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4718/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4719/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4720/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4721/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4722/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4723/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4724
4725/**
4726 * Vex group 15 jump table for memory variant.
4727 */
4728IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4729{ /* pfx: none, 066h, 0f3h, 0f2h */
4730 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4731 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4732 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4733 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4734 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4735 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4736 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4737 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4738};
4739AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4740
4741
4742/** Opcode VEX.0F 0xae. */
4743FNIEMOP_DEF(iemOp_VGrp15)
4744{
4745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4746 if (IEM_IS_MODRM_REG_MODE(bRm))
4747 /* register, register */
4748 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4749
4750 /* memory, register */
4751 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4752 + pVCpu->iem.s.idxPrefix], bRm);
4753}
4754
4755
4756/* Opcode VEX.0F 0xaf - invalid. */
4757
4758/* Opcode VEX.0F 0xb0 - invalid. */
4759/* Opcode VEX.0F 0xb1 - invalid. */
4760/* Opcode VEX.0F 0xb2 - invalid. */
4762/* Opcode VEX.0F 0xb3 - invalid. */
4763/* Opcode VEX.0F 0xb4 - invalid. */
4764/* Opcode VEX.0F 0xb5 - invalid. */
4765/* Opcode VEX.0F 0xb6 - invalid. */
4766/* Opcode VEX.0F 0xb7 - invalid. */
4767/* Opcode VEX.0F 0xb8 - invalid. */
4768/* Opcode VEX.0F 0xb9 - invalid. */
4769/* Opcode VEX.0F 0xba - invalid. */
4770/* Opcode VEX.0F 0xbb - invalid. */
4771/* Opcode VEX.0F 0xbc - invalid. */
4772/* Opcode VEX.0F 0xbd - invalid. */
4773/* Opcode VEX.0F 0xbe - invalid. */
4774/* Opcode VEX.0F 0xbf - invalid. */
4775
4776/* Opcode VEX.0F 0xc0 - invalid. */
4777/* Opcode VEX.66.0F 0xc0 - invalid. */
4778/* Opcode VEX.F3.0F 0xc0 - invalid. */
4779/* Opcode VEX.F2.0F 0xc0 - invalid. */
4780
4781/* Opcode VEX.0F 0xc1 - invalid. */
4782/* Opcode VEX.66.0F 0xc1 - invalid. */
4783/* Opcode VEX.F3.0F 0xc1 - invalid. */
4784/* Opcode VEX.F2.0F 0xc1 - invalid. */
4785
4786/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
4787FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
4788/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
4789FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
4790/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
4791FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
4792/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
4793FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4794
4795/* Opcode VEX.0F 0xc3 - invalid */
4796/* Opcode VEX.66.0F 0xc3 - invalid */
4797/* Opcode VEX.F3.0F 0xc3 - invalid */
4798/* Opcode VEX.F2.0F 0xc3 - invalid */
4799
4800/* Opcode VEX.0F 0xc4 - invalid */
4801
4802
4803/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
4804FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
4805{
4806 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
4807 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4808 if (IEM_IS_MODRM_REG_MODE(bRm))
4809 {
4810 /*
4811 * Register, register.
4812 */
4813 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4814 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4815 IEM_MC_LOCAL(RTUINT128U, uSrc1);
4816 IEM_MC_LOCAL(uint16_t, uValue);
4817
4818 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4819 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4820 IEM_MC_PREPARE_AVX_USAGE();
4821
4822 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4823 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
4824 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
4825 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
4826 IEM_MC_ADVANCE_RIP_AND_FINISH();
4827 IEM_MC_END();
4828 }
4829 else
4830 {
4831 /*
4832 * Register, memory.
4833 */
4834 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4835 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4836 IEM_MC_LOCAL(RTUINT128U, uSrc1);
4837 IEM_MC_LOCAL(uint16_t, uValue);
4838
4839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4840 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4841 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4842 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4843 IEM_MC_PREPARE_AVX_USAGE();
4844
4845 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4846 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4847 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
4848 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
4849 IEM_MC_ADVANCE_RIP_AND_FINISH();
4850 IEM_MC_END();
4851 }
4852}
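
/* Note: only the word selected by bImm & 7 is replaced; the other seven are
 *       taken from the first source (VEX.vvvv), which is why uSrc1 is copied
 *       to the destination before uValue is stored into the selected word. */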
4853
4854
4855/* Opcode VEX.F3.0F 0xc4 - invalid */
4856/* Opcode VEX.F2.0F 0xc4 - invalid */
4857
4858/* Opcode VEX.0F 0xc5 - invalid */
4859
4860
4861/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
4862FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
4863{
4864 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
4865 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4866 if (IEM_IS_MODRM_REG_MODE(bRm))
4867 {
4868 /*
4869 * greg32, XMM, imm8.
4870 */
4871 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4872 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4873 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4874 IEM_MC_LOCAL(uint16_t, uValue);
4875 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4876 IEM_MC_PREPARE_AVX_USAGE();
4877 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
4878 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
4879 IEM_MC_ADVANCE_RIP_AND_FINISH();
4880 IEM_MC_END();
4881 }
4882 /* No memory operand. */
4883 else
4884 IEMOP_RAISE_INVALID_OPCODE_RET();
4885}
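
/* Note: the word selected by bImm & 7 is zero extended into the 32-bit
 *       destination GPR; in 64-bit mode that write also clears the upper
 *       half of the register. */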
4886
4887
4888/* Opcode VEX.F3.0F 0xc5 - invalid */
4889/* Opcode VEX.F2.0F 0xc5 - invalid */
4890
4891
4892#define VSHUFP_X(a_Instr) \
4893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4894 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4895 { \
4896 /* \
4897 * Register, register. \
4898 */ \
4899 if (pVCpu->iem.s.uVexLength) \
4900 { \
4901 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4902 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4903 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4904 IEM_MC_LOCAL(RTUINT256U, uDst); \
4905 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4906 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4907 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4908 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4909 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4910 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4911 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4912 IEM_MC_PREPARE_AVX_USAGE(); \
4913 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4914 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4915 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4916 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4917 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4918 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4919 IEM_MC_END(); \
4920 } \
4921 else \
4922 { \
4923 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4924 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4925 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4926 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4927 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4928 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
4929 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4930 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4931 IEM_MC_PREPARE_AVX_USAGE(); \
4932 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4933 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4934 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4935 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4936 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4937 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4938 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4939 IEM_MC_END(); \
4940 } \
4941 } \
4942 else \
4943 { \
4944 /* \
4945 * Register, memory. \
4946 */ \
4947 if (pVCpu->iem.s.uVexLength) \
4948 { \
4949 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4950 IEM_MC_LOCAL(RTUINT256U, uDst); \
4951 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4952 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4953 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4954 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4955 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4956 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4958 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4959 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4960 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4961 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4962 IEM_MC_PREPARE_AVX_USAGE(); \
4963 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4964 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4965 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4966 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4967 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4968 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4969 IEM_MC_END(); \
4970 } \
4971 else \
4972 { \
4973 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4974 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
4975 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4976 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4977 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4978 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
4979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4980 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4981 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4982 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4983 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4984 IEM_MC_PREPARE_AVX_USAGE(); \
4985 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4986 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4987 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4988 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4989 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4990 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4991 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4992 IEM_MC_END(); \
4993 } \
4994 } \
4995 (void)0
4996
4997/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4998FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4999{
5000    IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
5001 VSHUFP_X(vshufps);
5002}
5003
5004
5005/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
5006FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
5007{
5008 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
5009 VSHUFP_X(vshufpd);
5010}
5011#undef VSHUFP_X
5012
5013
5014/* Opcode VEX.F3.0F 0xc6 - invalid */
5015/* Opcode VEX.F2.0F 0xc6 - invalid */
5016
5017/* Opcode VEX.0F 0xc7 - invalid */
5018/* Opcode VEX.66.0F 0xc7 - invalid */
5019/* Opcode VEX.F3.0F 0xc7 - invalid */
5020/* Opcode VEX.F2.0F 0xc7 - invalid */
5021
5022/* Opcode VEX.0F 0xc8 - invalid */
5023/* Opcode VEX.0F 0xc9 - invalid */
5024/* Opcode VEX.0F 0xca - invalid */
5025/* Opcode VEX.0F 0xcb - invalid */
5026/* Opcode VEX.0F 0xcc - invalid */
5027/* Opcode VEX.0F 0xcd - invalid */
5028/* Opcode VEX.0F 0xce - invalid */
5029/* Opcode VEX.0F 0xcf - invalid */
5030
5031
5032/* Opcode VEX.0F 0xd0 - invalid */
5033/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
5034FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
5035/* Opcode VEX.F3.0F 0xd0 - invalid */
5036/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
5037FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
5038
5039/* Opcode VEX.0F 0xd1 - invalid */
5040/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
5041FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
5042{
5043 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5044 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
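    /* IEMOPMEDIAOPTF3_INIT_VARS sets up the s_Host/s_Fallback worker tables;
       IEM_SELECT_HOST_OR_FALLBACK picks the host-optimized workers when the
       host CPU itself has AVX2 and the portable C fallbacks otherwise. */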
5045 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5046}
5047
5048/* Opcode VEX.F3.0F 0xd1 - invalid */
5049/* Opcode VEX.F2.0F 0xd1 - invalid */
5050
5051/* Opcode VEX.0F 0xd2 - invalid */
5052/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
5053FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
5054{
5055 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5056 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
5057 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5058}
5059
5060/* Opcode VEX.F3.0F 0xd2 - invalid */
5061/* Opcode VEX.F2.0F 0xd2 - invalid */
5062
5063/* Opcode VEX.0F 0xd3 - invalid */
5064/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
5065FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
5066{
5067 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5068 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
5069 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5070}
5071
5072/* Opcode VEX.F3.0F 0xd3 - invalid */
5073/* Opcode VEX.F2.0F 0xd3 - invalid */
5074
5075/* Opcode VEX.0F 0xd4 - invalid */
5076
5077
5078/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
5079FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
5080{
5081 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5082 IEMOPMEDIAOPTF3_INIT_VARS( vpaddq);
5083 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5084}
5085
5086
5087/* Opcode VEX.F3.0F 0xd4 - invalid */
5088/* Opcode VEX.F2.0F 0xd4 - invalid */
5089
5090/* Opcode VEX.0F 0xd5 - invalid */
5091
5092
5093/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
5094FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
5095{
5096 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5097 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
5098 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5099}
5100
5101
5102/* Opcode VEX.F3.0F 0xd5 - invalid */
5103/* Opcode VEX.F2.0F 0xd5 - invalid */
5104
5105/* Opcode VEX.0F 0xd6 - invalid */
5106
5107/**
5108 * @opcode 0xd6
5109 * @oppfx 0x66
5110 * @opcpuid avx
5111 * @opgroup og_avx_pcksclr_datamove
5112 * @opxcpttype none
5113 * @optest op1=-1 op2=2 -> op1=2
5114 * @optest op1=0 op2=-42 -> op1=-42
5115 */
5116FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
5117{
5118 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5120 if (IEM_IS_MODRM_REG_MODE(bRm))
5121 {
5122 /*
5123 * Register, register.
5124 */
5125 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5126 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5127
5128 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5129 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5130
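        /* The _ZX_VLMAX copy stores the low qword and zeroes the destination
           register all the way up to the maximum vector length (bits 255:64). */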
5131 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5132 IEM_GET_MODRM_REG(pVCpu, bRm));
5133 IEM_MC_ADVANCE_RIP_AND_FINISH();
5134 IEM_MC_END();
5135 }
5136 else
5137 {
5138 /*
5139 * Memory, register.
5140 */
5141 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5142 IEM_MC_LOCAL(uint64_t, uSrc);
5143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5144
5145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5146 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5147 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5148 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5149
5150 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5151 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5152
5153 IEM_MC_ADVANCE_RIP_AND_FINISH();
5154 IEM_MC_END();
5155 }
5156}
5157
5158/* Opcode VEX.F3.0F 0xd6 - invalid */
5159/* Opcode VEX.F2.0F 0xd6 - invalid */
5160
5161
5162/* Opcode VEX.0F 0xd7 - invalid */
5163
5164/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
5165FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
5166{
5167 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5168    /* Docs say register only. */
5169 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
5170 {
5171        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
5172 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
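        /* vpmovmskb gathers the most significant bit of each source byte into
           the low 16 (128-bit form) resp. 32 (256-bit form) bits of the GREG. */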
5173 if (pVCpu->iem.s.uVexLength)
5174 {
5175 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5176 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
5177 IEM_MC_ARG(uint64_t *, puDst, 0);
5178 IEM_MC_LOCAL(RTUINT256U, uSrc);
5179 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
5180 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5181 IEM_MC_PREPARE_AVX_USAGE();
5182 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5183 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5184 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
5185 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
5186 IEM_MC_ADVANCE_RIP_AND_FINISH();
5187 IEM_MC_END();
5188 }
5189 else
5190 {
5191 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5192 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5193 IEM_MC_ARG(uint64_t *, puDst, 0);
5194 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5195 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5196 IEM_MC_PREPARE_AVX_USAGE();
5197 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5198 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5199 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
5200 IEM_MC_ADVANCE_RIP_AND_FINISH();
5201 IEM_MC_END();
5202 }
5203 }
5204 else
5205 IEMOP_RAISE_INVALID_OPCODE_RET();
5206}
5207
5208
5209/* Opcode VEX.F3.0F 0xd7 - invalid */
5210/* Opcode VEX.F2.0F 0xd7 - invalid */
5211
5212
5213/* Opcode VEX.0F 0xd8 - invalid */
5214
5215/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
5216FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
5217{
5218 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5219 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
5220 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5221}
5222
5223
5224/* Opcode VEX.F3.0F 0xd8 - invalid */
5225/* Opcode VEX.F2.0F 0xd8 - invalid */
5226
5227/* Opcode VEX.0F 0xd9 - invalid */
5228
5229
5230/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
5231FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
5232{
5233 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5234 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
5235 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5236}
5237
5238
5239/* Opcode VEX.F3.0F 0xd9 - invalid */
5240/* Opcode VEX.F2.0F 0xd9 - invalid */
5241
5242/* Opcode VEX.0F 0xda - invalid */
5243
5244
5245/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
5246FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
5247{
5248 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5249 IEMOPMEDIAOPTF3_INIT_VARS(vpminub);
5250 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5251}
5252
5253
5254/* Opcode VEX.F3.0F 0xda - invalid */
5255/* Opcode VEX.F2.0F 0xda - invalid */
5256
5257/* Opcode VEX.0F 0xdb - invalid */
5258
5259
5260/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
5261FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
5262{
5263 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
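    /* The plain bitwise ops (vpand, vpandn, vpor, vpxor) share prebuilt global
       worker tables rather than using per-opcode IEMOPMEDIAOPTF3_INIT_VARS. */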
5264 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5265 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
5266}
5267
5268
5269/* Opcode VEX.F3.0F 0xdb - invalid */
5270/* Opcode VEX.F2.0F 0xdb - invalid */
5271
5272/* Opcode VEX.0F 0xdc - invalid */
5273
5274
5275/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
5276FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
5277{
5278 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5279 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
5280 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5281}
5282
5283
5284/* Opcode VEX.F3.0F 0xdc - invalid */
5285/* Opcode VEX.F2.0F 0xdc - invalid */
5286
5287/* Opcode VEX.0F 0xdd - invalid */
5288
5289
5290/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
5291FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
5292{
5293 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5294 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
5295 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5296}
5297
5298
5299/* Opcode VEX.F3.0F 0xdd - invalid */
5300/* Opcode VEX.F2.0F 0xdd - invalid */
5301
5302/* Opcode VEX.0F 0xde - invalid */
5303
5304
5305/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
5306FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
5307{
5308 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5309 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxub);
5310 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5311}
5312
5313
5314/* Opcode VEX.F3.0F 0xde - invalid */
5315/* Opcode VEX.F2.0F 0xde - invalid */
5316
5317/* Opcode VEX.0F 0xdf - invalid */
5318
5319
5320/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
5321FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
5322{
5323 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5324 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5325 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
5326}
5327
5328
5329/* Opcode VEX.F3.0F 0xdf - invalid */
5330/* Opcode VEX.F2.0F 0xdf - invalid */
5331
5332/* Opcode VEX.0F 0xe0 - invalid */
5333
5334
5335/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
5336FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
5337{
5338 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5339 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
5340 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5341}
5342
5343
5344/* Opcode VEX.F3.0F 0xe0 - invalid */
5345/* Opcode VEX.F2.0F 0xe0 - invalid */
5346
5347/* Opcode VEX.0F 0xe1 - invalid */
5348/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
5349FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
5350{
5351 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5352 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
5353 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5354}
5355
5356/* Opcode VEX.F3.0F 0xe1 - invalid */
5357/* Opcode VEX.F2.0F 0xe1 - invalid */
5358
5359/* Opcode VEX.0F 0xe2 - invalid */
5360/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
5361FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
5362{
5363 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5364 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
5365 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5366}
5367
5368/* Opcode VEX.F3.0F 0xe2 - invalid */
5369/* Opcode VEX.F2.0F 0xe2 - invalid */
5370
5371/* Opcode VEX.0F 0xe3 - invalid */
5372
5373
5374/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
5375FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
5376{
5377 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5378 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
5379 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5380}
5381
5382
5383/* Opcode VEX.F3.0F 0xe3 - invalid */
5384/* Opcode VEX.F2.0F 0xe3 - invalid */
5385
5386/* Opcode VEX.0F 0xe4 - invalid */
5387
5388
5389/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
5390FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
5391{
5392 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5393 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
5394 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5395}
5396
5397
5398/* Opcode VEX.F3.0F 0xe4 - invalid */
5399/* Opcode VEX.F2.0F 0xe4 - invalid */
5400
5401/* Opcode VEX.0F 0xe5 - invalid */
5402
5403
5404/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
5405FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
5406{
5407 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5408 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
5409 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5410}
5411
5412
5413/* Opcode VEX.F3.0F 0xe5 - invalid */
5414/* Opcode VEX.F2.0F 0xe5 - invalid */
5415
5416/* Opcode VEX.0F 0xe6 - invalid */
5417/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
5418FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
5419/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
5420FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
5421/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
5422FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
5423
5424
5425/* Opcode VEX.0F 0xe7 - invalid */
5426
5427/**
5428 * @opcode 0xe7
5429 * @opcodesub !11 mr/reg
5430 * @oppfx 0x66
5431 * @opcpuid avx
5432 * @opgroup og_avx_cachect
5433 * @opxcpttype 1
5434 * @optest op1=-1 op2=2 -> op1=2
5435 * @optest op1=0 op2=-42 -> op1=-42
5436 */
5437FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
5438{
5439 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5440 Assert(pVCpu->iem.s.uVexLength <= 1);
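    /* Note! The non-temporal hint is not modeled; the instruction is emulated
             as an ordinary aligned 128/256-bit store. */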
5441 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5442 if (IEM_IS_MODRM_MEM_MODE(bRm))
5443 {
5444 if (pVCpu->iem.s.uVexLength == 0)
5445 {
5446 /*
5447 * 128-bit: Memory, register.
5448 */
5449 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5450 IEM_MC_LOCAL(RTUINT128U, uSrc);
5451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5452
5453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5454 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5455 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5456 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5457
5458 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5459 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5460
5461 IEM_MC_ADVANCE_RIP_AND_FINISH();
5462 IEM_MC_END();
5463 }
5464 else
5465 {
5466 /*
5467 * 256-bit: Memory, register.
5468 */
5469 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5470 IEM_MC_LOCAL(RTUINT256U, uSrc);
5471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5472
5473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5474 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5475 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5476 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5477
5478 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5479 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5480
5481 IEM_MC_ADVANCE_RIP_AND_FINISH();
5482 IEM_MC_END();
5483 }
5484 }
5485 /**
5486 * @opdone
5487 * @opmnemonic udvex660fe7reg
5488 * @opcode 0xe7
5489 * @opcodesub 11 mr/reg
5490 * @oppfx 0x66
5491 * @opunused immediate
5492 * @opcpuid avx
5493 * @optest ->
5494 */
5495 else
5496 IEMOP_RAISE_INVALID_OPCODE_RET();
5497}
5498
5499/* Opcode VEX.F3.0F 0xe7 - invalid */
5500/* Opcode VEX.F2.0F 0xe7 - invalid */
5501
5502
5503/* Opcode VEX.0F 0xe8 - invalid */
5504
5505
5506/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
5507FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
5508{
5509 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5510 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
5511 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5512}
5513
5514
5515/* Opcode VEX.F3.0F 0xe8 - invalid */
5516/* Opcode VEX.F2.0F 0xe8 - invalid */
5517
5518/* Opcode VEX.0F 0xe9 - invalid */
5519
5520
5521/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
5522FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
5523{
5524 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5525 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
5526 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5527}
5528
5529
5530/* Opcode VEX.F3.0F 0xe9 - invalid */
5531/* Opcode VEX.F2.0F 0xe9 - invalid */
5532
5533/* Opcode VEX.0F 0xea - invalid */
5534
5535
5536/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
5537FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
5538{
5539 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5540 IEMOPMEDIAOPTF3_INIT_VARS(vpminsw);
5541 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5542}
5543
5544
5545/* Opcode VEX.F3.0F 0xea - invalid */
5546/* Opcode VEX.F2.0F 0xea - invalid */
5547
5548/* Opcode VEX.0F 0xeb - invalid */
5549
5550
5551/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
5552FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
5553{
5554 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5555 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5556 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
5557}
5558
5559
5560
5561/* Opcode VEX.F3.0F 0xeb - invalid */
5562/* Opcode VEX.F2.0F 0xeb - invalid */
5563
5564/* Opcode VEX.0F 0xec - invalid */
5565
5566
5567/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
5568FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
5569{
5570 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5571 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
5572 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5573}
5574
5575
5576/* Opcode VEX.F3.0F 0xec - invalid */
5577/* Opcode VEX.F2.0F 0xec - invalid */
5578
5579/* Opcode VEX.0F 0xed - invalid */
5580
5581
5582/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
5583FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
5584{
5585 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5586 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
5587 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5588}
5589
5590
5591/* Opcode VEX.F3.0F 0xed - invalid */
5592/* Opcode VEX.F2.0F 0xed - invalid */
5593
5594/* Opcode VEX.0F 0xee - invalid */
5595
5596
5597/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
5598FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
5599{
5600 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5601 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsw);
5602 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5603}
5604
5605
5606/* Opcode VEX.F3.0F 0xee - invalid */
5607/* Opcode VEX.F2.0F 0xee - invalid */
5608
5609
5610/* Opcode VEX.0F 0xef - invalid */
5611
5612
5613/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
5614FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
5615{
5616 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5617 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5618 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
5619}
5620
5621
5622/* Opcode VEX.F3.0F 0xef - invalid */
5623/* Opcode VEX.F2.0F 0xef - invalid */
5624
5625/* Opcode VEX.0F 0xf0 - invalid */
5626/* Opcode VEX.66.0F 0xf0 - invalid */
5627
5628
5629/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
5630FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
5631{
5632 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5633 Assert(pVCpu->iem.s.uVexLength <= 1);
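    /* vlddqu is architecturally an unaligned load, hence the _NO_AC (no
       alignment check) fetches below. */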
5634 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5635 if (IEM_IS_MODRM_REG_MODE(bRm))
5636 {
5637 /*
5638 * Register, register - (not implemented, assuming it raises \#UD).
5639 */
5640 IEMOP_RAISE_INVALID_OPCODE_RET();
5641 }
5642 else if (pVCpu->iem.s.uVexLength == 0)
5643 {
5644 /*
5645 * Register, memory128.
5646 */
5647 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5648 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5650
5651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5652 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5653 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5654 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5655
5656 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5657 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
5658
5659 IEM_MC_ADVANCE_RIP_AND_FINISH();
5660 IEM_MC_END();
5661 }
5662 else
5663 {
5664 /*
5665 * Register, memory256.
5666 */
5667 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5668 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5670
5671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5672 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5673 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5674 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5675
5676 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5677 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
5678
5679 IEM_MC_ADVANCE_RIP_AND_FINISH();
5680 IEM_MC_END();
5681 }
5682}
5683
5684
5685/* Opcode VEX.0F 0xf1 - invalid */
5686/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
5687FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
5688{
5689 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5690 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
5691 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5692}
5693
5694/* Opcode VEX.F2.0F 0xf1 - invalid */
5695
5696/* Opcode VEX.0F 0xf2 - invalid */
5697/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
5698FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
5699{
5700 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5701 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
5702 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5703}
5704/* Opcode VEX.F2.0F 0xf2 - invalid */
5705
5706/* Opcode VEX.0F 0xf3 - invalid */
5707/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
5708FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
5709{
5710 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5711 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
5712 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5713}
5714/* Opcode VEX.F2.0F 0xf3 - invalid */
5715
5716/* Opcode VEX.0F 0xf4 - invalid */
5717
5718
5719/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
5720FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
5721{
5722 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5723 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
5724 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5725}
5726
5727
5728/* Opcode VEX.F2.0F 0xf4 - invalid */
5729
5730/* Opcode VEX.0F 0xf5 - invalid */
5731
5732
5733/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
5734FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
5735{
5736 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5737 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
5738 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5739}
5740
5741
5742/* Opcode VEX.F2.0F 0xf5 - invalid */
5743
5744/* Opcode VEX.0F 0xf6 - invalid */
5745
5746
5747/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
5748FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
5749{
5750 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5751 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
5752 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5753}
5754
5755
5756/* Opcode VEX.F2.0F 0xf6 - invalid */
5757
5758/* Opcode VEX.0F 0xf7 - invalid */
5759
5760
5761/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
5762FNIEMOP_DEF(iemOp_vmaskmovdqu_Vdq_Udq)
5763{
5764// IEMOP_MNEMONIC2(RM, VMASKMOVDQU, vmaskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
5765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5766 if (IEM_IS_MODRM_REG_MODE(bRm))
5767 {
5768 /*
5769         * XMM, XMM, (implicit) [E/R]DI
5770 */
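        /* Note! Emulated as a read-modify-write of the whole dqword at [rdi]:
                 the current memory content is fetched, the worker merges in the
                 source bytes whose mask byte has the MSB set, and the result is
                 written back in full (real hardware only stores the selected
                 bytes, with a non-temporal hint). */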
5771 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5772 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5773 IEM_MC_LOCAL( uint64_t, u64EffAddr);
5774 IEM_MC_LOCAL( RTUINT128U, u128Mem);
5775 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
5776 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
5777 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
5778 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5779 IEM_MC_PREPARE_AVX_USAGE();
5780
5781 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
5782 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
5783 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5784 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
5785 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
5786 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
5787
5788 IEM_MC_ADVANCE_RIP_AND_FINISH();
5789 IEM_MC_END();
5790 }
5791 else
5792 {
5793 /* The memory, register encoding is invalid. */
5794 IEMOP_RAISE_INVALID_OPCODE_RET();
5795 }
5796}
5797
5798
5799/* Opcode VEX.F2.0F 0xf7 - invalid */
5800
5801/* Opcode VEX.0F 0xf8 - invalid */
5802
5803
5804/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
5805FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
5806{
5807 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5808 IEMOPMEDIAOPTF3_INIT_VARS( vpsubb);
5809 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5810}
5811
5812
5813/* Opcode VEX.F2.0F 0xf8 - invalid */
5814
5815/* Opcode VEX.0F 0xf9 - invalid */
5816
5817
5818/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
5819FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
5820{
5821 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5822 IEMOPMEDIAOPTF3_INIT_VARS( vpsubw);
5823 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5824}
5825
5826
5827/* Opcode VEX.F2.0F 0xf9 - invalid */
5828
5829/* Opcode VEX.0F 0xfa - invalid */
5830
5831
5832/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
5833FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
5834{
5835 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5836 IEMOPMEDIAOPTF3_INIT_VARS( vpsubd);
5837 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5838}
5839
5840
5841/* Opcode VEX.F2.0F 0xfa - invalid */
5842
5843/* Opcode VEX.0F 0xfb - invalid */
5844
5845
5846/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
5847FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
5848{
5849 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5850 IEMOPMEDIAOPTF3_INIT_VARS( vpsubq);
5851 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5852}
5853
5854
5855/* Opcode VEX.F2.0F 0xfb - invalid */
5856
5857/* Opcode VEX.0F 0xfc - invalid */
5858
5859
5860/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
5861FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
5862{
5863 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5864 IEMOPMEDIAOPTF3_INIT_VARS( vpaddb);
5865 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5866}
5867
5868
5869/* Opcode VEX.F2.0F 0xfc - invalid */
5870
5871/* Opcode VEX.0F 0xfd - invalid */
5872
5873
5874/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
5875FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
5876{
5877 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5878 IEMOPMEDIAOPTF3_INIT_VARS( vpaddw);
5879 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5880}
5881
5882
5883/* Opcode VEX.F2.0F 0xfd - invalid */
5884
5885/* Opcode VEX.0F 0xfe - invalid */
5886
5887
5888/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
5889FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
5890{
5891 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5892 IEMOPMEDIAOPTF3_INIT_VARS( vpaddd);
5893 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5894}
5895
5896
5897/* Opcode VEX.F2.0F 0xfe - invalid */
5898
5899
5900/** Opcode **** 0x0f 0xff - UD0 */
5901FNIEMOP_DEF(iemOp_vud0)
5902{
5903/** @todo testcase: vud0 */
5904 IEMOP_MNEMONIC(vud0, "vud0");
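    /* Intel CPUs decode a ModR/M byte (and any addressing bytes) for UD0
       before raising \#UD, while AMD ones raise it straight away; both
       behaviors are mimicked here based on the CPU vendor. */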
5905 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
5906 {
5907 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
5908 if (IEM_IS_MODRM_MEM_MODE(bRm))
5909 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
5910 }
5911 IEMOP_HLP_DONE_DECODING();
5912 IEMOP_RAISE_INVALID_OPCODE_RET();
5913}
5914
5915
5916
5917/**
5918 * VEX opcode map \#1.
5919 *
5920 * @sa g_apfnTwoByteMap
5921 */
5922const PFNIEMOP g_apfnVexMap1[] =
5923{
5924 /* no prefix, 066h prefix f3h prefix, f2h prefix */
5925 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
5926 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
5927 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
5928 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
5929 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
5930 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
5931 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
5932 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
5933 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
5934 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
5935 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
5936 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
5937 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
5938 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
5939 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
5940 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
5941
5942 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
5943 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
5944 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
5945 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5946 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5947 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5948 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
5949 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5950 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
5951 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
5952 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
5953 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
5954 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
5955 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
5956 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
5957 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
5958
5959 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
5960 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
5961 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
5962 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
5963 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
5964 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
5965 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
5966 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
5967 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5968 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5969 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
5970 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5971 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
5972 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
5973 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5974 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5975
5976 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
5977 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
5978 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
5979 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
5980 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
5981 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
5982 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
5983 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
5984 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5985 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5986 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5987 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5988 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5989 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5990 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5991 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5992
5993 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
5994 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
5995 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
5996 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
5997 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
5998 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
5999 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
6000 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
6001 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
6002 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
6003 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
6004 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
6005 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
6006 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
6007 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
6008 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
6009
6010 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6011 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
6012 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
6013 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
6014 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6015 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6016 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6017 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6018 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
6019 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
6020 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
6021 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
6022 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
6023 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
6024 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
6025 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
6026
6027 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6028 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6029 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6030 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6031 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6032 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6033 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6034 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6035 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6036 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6037 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6038 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6039 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6040 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6041 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6042 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
6043
6044 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
6045 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6046 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6047 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6048 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6049 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6050 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6051 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6052 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
6053 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
6054 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
6055 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
6056 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
6057 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
6058 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
6059 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
6060
6061 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
6062 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
6063 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
6064 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
6065 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
6066 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
6067 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
6068 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
6069 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
6070 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
6071 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
6072 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
6073 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
6074 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
6075 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
6076 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
6077
6078 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
6079 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
6080 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
6081 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
6082 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
6083 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
6084 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
6085 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
6086 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
6087 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
6088 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
6089 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
6090 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
6091 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
6092 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
6093 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
6094
6095 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
6096 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
6097 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
6098 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
6099 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
6100 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
6101 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
6102 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
6103 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
6104 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
6105 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
6106 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
6107 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
6108 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
6109 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
6110 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
6111
6112 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
6113 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
6114 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
6115 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
6116 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
6117 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
6118 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
6119 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
6120 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
6121 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
6122 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
6123 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
6124 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
6125 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
6126 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
6127 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
6128
6129 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
6130 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
6131 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
6132 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
6133 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
6134 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
6135    /* 0xc6 */  iemOp_vshufps_Vps_Hps_Wps_Ib,  iemOp_vshufpd_Vpd_Hpd_Wpd_Ib,  iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
6136 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
6137 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
6138 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
6139 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
6140 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
6141 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
6142 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
6143 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
6144 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
6145
6146 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
6147 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6148 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6149 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6150 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6151 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6152 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6153 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6154 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6155 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6156 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6157 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6158 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6159 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6160 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6161 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6162
6163 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6164 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6165 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6166 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6167 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6168 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6169 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
6170 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6171 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6172 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6173 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6174 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6175 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6176 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6177 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6178 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6179
6180 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
6181 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6182 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6183 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6184 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6185 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6186 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6187 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6188 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6189 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6190 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6191 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6192 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6193 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6194 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6195 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
6196};
6197AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
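/* 4 entries (no prefix, 0x66, 0xf3, 0xf2) for each of the 256 opcode bytes. */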
6198/** @} */
6199